Compare commits
45 Commits
developmen ... ghivert/re
Author | SHA1 | Date |
---|---|---|
Guillaume Hivert | 8400204d95 | |
Guillaume Hivert | 0d057fb2d0 | |
Guillaume Hivert | 86c8c77dd2 | |
Guillaume Hivert | 52284a871a | |
Guillaume Hivert | a2d3dfeb21 | |
Guillaume Hivert | 73e0150612 | |
Guillaume Hivert | 89d5970aac | |
Guillaume Hivert | 8f4870f5a2 | |
Guillaume Hivert | 11a9032411 | |
Guillaume Hivert | 8e11405cf7 | |
Guillaume Hivert | 4b76407edd | |
Guillaume Hivert | 10a2a09a79 | |
Guillaume Hivert | a27379559c | |
Guillaume Hivert | 797f61d6c0 | |
Guillaume Hivert | 1a63e07297 | |
Guillaume Hivert | 403d6d3e79 | |
Guillaume Hivert | b5ed8b39d5 | |
Guillaume Hivert | f4fcc4b773 | |
Guillaume Hivert | d9d91596c0 | |
Guillaume Hivert | f2c343e8da | |
Guillaume Hivert | 952594945a | |
Guillaume Hivert | da48725ace | |
Guillaume Hivert | 13ecfd0c56 | |
Guillaume Hivert | 8ff5bf713a | |
Guillaume Hivert | bde858d8a7 | |
Guillaume Hivert | 841d4e4823 | |
Guillaume Hivert | 9069bd9c61 | |
Guillaume Hivert | 55b003c00f | |
Guillaume Hivert | 21fc1b7c64 | |
Guillaume Hivert | 8723084507 | |
Guillaume Hivert | 9f0a16c779 | |
Guillaume Hivert | d1cbd6b33f | |
Guillaume Hivert | 48cf48c8eb | |
Guillaume Hivert | b9a38fdbe0 | |
Guillaume Hivert | 5d5393bf4e | |
Guillaume Hivert | f05701d5b8 | |
Guillaume Hivert | df67f0ac35 | |
Guillaume Hivert | 109c3dc1f0 | |
Guillaume Hivert | 71da9c5ac6 | |
Guillaume Hivert | 6e02185ea0 | |
Guillaume Hivert | 948e724c31 | |
Guillaume Hivert | 377f52aad4 | |
Guillaume Hivert | db99440bce | |
Guillaume Hivert | cb2853a388 | |
Guillaume Hivert | df4b2e8eae | |

```diff
@@ -1,6 +1 @@
-{
-    "extends": "scality",
-    "parserOptions": {
-        "ecmaVersion": 2020
-    }
-}
+{ "extends": "scality" }
```
```diff
@@ -1,25 +0,0 @@
----
-name: codeQL
-
-on:
-  push:
-    branches: [development/*, stabilization/*, hotfix/*]
-  pull_request:
-    branches: [development/*, stabilization/*, hotfix/*]
-  workflow_dispatch:
-
-jobs:
-  analyze:
-    name: Static analysis with CodeQL
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-
-      - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3
-        with:
-          languages: javascript, typescript
-
-      - name: Build and analyze
-        uses: github/codeql-action/analyze@v3
```
```diff
@@ -1,16 +0,0 @@
----
-name: dependency review
-
-on:
-  pull_request:
-    branches: [development/*, stabilization/*, hotfix/*]
-
-jobs:
-  dependency-review:
-    runs-on: ubuntu-latest
-    steps:
-      - name: 'Checkout Repository'
-        uses: actions/checkout@v4
-
-      - name: 'Dependency Review'
-        uses: actions/dependency-review-action@v4
```
```diff
@@ -25,18 +25,18 @@ jobs:
           - 6379:6379
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+        uses: actions/checkout@v2
+      - uses: actions/setup-node@v2
         with:
           node-version: '16'
           cache: 'yarn'
       - name: install dependencies
-        run: yarn install --frozen-lockfile --prefer-offline --network-concurrency 1
+        run: yarn cache clean && yarn install --frozen-lockfile
         continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
       - name: lint yaml
         run: yarn --silent lint_yml
       - name: lint javascript
-        run: yarn --silent lint --max-warnings 0
+        run: yarn --silent lint -- --max-warnings 0
       - name: lint markdown
         run: yarn --silent lint_md
       - name: add hostname
```
```diff
@@ -46,9 +46,7 @@ jobs:
         run: yarn --silent coverage
       - name: run functional tests
         run: yarn ft_test
-      - uses: codecov/codecov-action@v4
-        with:
-          token: ${{ secrets.CODECOV_TOKEN }}
+      - uses: codecov/codecov-action@v2
       - name: run executables tests
         run: yarn install && yarn test
         working-directory: 'lib/executables/pensieveCreds/'
```
```diff
@@ -59,20 +57,20 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
      - name: Install NodeJS
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v2
         with:
           node-version: '16'
           cache: yarn
       - name: Install dependencies
-        run: yarn install --frozen-lockfile --prefer-offline
+        run: yarn cache clean && yarn install --frozen-lockfile
         continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
       - name: Compile
         run: yarn build
         continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
       - name: Upload artifacts
-        uses: scality/action-artifacts@v4
+        uses: scality/action-artifacts@v2
         with:
           url: https://artifacts.scality.net
           user: ${{ secrets.ARTIFACTS_USER }}
```
.swcrc

```diff
@@ -1,12 +0,0 @@
-{
-    "$schema": "https://swc.rs/schema.json",
-    "jsc": {
-        "parser": {
-            "syntax": "typescript"
-        },
-        "target": "es2017"
-    },
-    "module": {
-        "type": "commonjs"
-    }
-}
```
````diff
@@ -178,83 +178,3 @@ this._serverSideEncryption.configuredMasterKeyId = configuredMasterKeyId || undefined;
 ### Usage
 
 Used to store the users configured KMS key id
-
-## Model version 15
-
-### Properties Added
-
-```javascript
-this._tags = tags || null;
-```
-
-The Tag Set of a bucket is an array of objects with Key and Value:
-
-```javascript
-[
-    {
-        Key: 'something',
-        Value: 'some_data'
-    }
-]
-```
-
-## Model version 16
-
-### Properties Added
-
-```javascript
-this._capabilities = capabilities || undefined;
-```
-
-For capacity-enabled buckets, contains the following data:
-
-```javascript
-{
-    _capabilities: {
-        VeeamSOSApi?: {
-            SystemInfo?: {
-                ProtocolVersion: String,
-                ModelName: String,
-                ProtocolCapabilities: {
-                    CapacityInfo: Boolean,
-                    UploadSessions: Boolean,
-                    IAMSTS: Boolean,
-                },
-                APIEndpoints: {
-                    IAMEndpoint: String,
-                    STSEndpoint: String,
-                },
-                SystemRecommendations?: {
-                    S3ConcurrentTaskLimit: Number,
-                    S3MultiObjectDelete: Number,
-                    StorageCurrentTasksLimit: Number,
-                    KbBlockSize: Number,
-                }
-                LastModified?: String,
-            },
-            CapacityInfo?: {
-                Capacity: Number,
-                Available: Number,
-                Used: Number,
-                LastModified?: String,
-            },
-        }
-    },
-}
-```
-
-### Usage
-
-Used to store bucket tagging
-
-## Model version 17
-
-### Properties Added
-
-```javascript
-this._quotaMax = quotaMax || 0;
-```
-
-### Usage
-
-Used to store bucket quota
````
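Taken together, model versions 15 through 17 add three bucket-metadata fields. A minimal sketch of how they might sit on a bucket-metadata object (the class name and constructor shape are hypothetical, not Arsenal's actual `BucketInfo` API, which takes many more arguments):

```javascript
// Minimal sketch of the model-version 15-17 fields; the class and its
// constructor are hypothetical, not Arsenal's actual BucketInfo.
class BucketMetadataSketch {
    constructor({ tags, capabilities, quotaMax } = {}) {
        // Model version 15: tag set, an array of { Key, Value } objects
        this._tags = tags || null;
        // Model version 16: capability data (e.g. VeeamSOSApi) for
        // capacity-enabled buckets
        this._capabilities = capabilities || undefined;
        // Model version 17: bucket quota; 0 when no quota is configured
        this._quotaMax = quotaMax || 0;
    }
}

const bucket = new BucketMetadataSketch({
    tags: [{ Key: 'something', Value: 'some_data' }],
    quotaMax: 10 * 1024 * 1024 * 1024,
});
```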
```diff
@@ -1,27 +0,0 @@
-# Delimiter
-
-The Delimiter class handles raw listings from the database with an
-optional delimiter, and fills in a curated listing with "Contents" and
-"CommonPrefixes" as a result.
-
-## Expected Behavior
-
-- only lists keys belonging to the given **prefix** (if provided)
-
-- groups listed keys that have a common prefix ending with a delimiter
-  inside CommonPrefixes
-
-- can take a **marker** or **continuationToken** to list from a specific key
-
-- can take a **maxKeys** parameter to limit how many keys can be returned
-
-## State Chart
-
-- States with grey background are *Idle* states, which are waiting for
-  a new listing key
-
-- States with blue background are *Processing* states, which are
-  actively processing a new listing key passed by the filter()
-  function
-
-![Delimiter State Chart](./pics/delimiterStateChart.svg)
```
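As a rough illustration of the grouping rules the deleted doc describes (a self-contained sketch, not the actual `Delimiter` class or its `filter()` state machine), one pass over sorted keys can split a listing into Contents and CommonPrefixes:

```javascript
// Illustrative one-pass grouping of sorted keys into Contents and
// CommonPrefixes (hypothetical helper, not the real Delimiter class).
function listKeys(keys, { prefix = '', delimiter = '', maxKeys = 1000 } = {}) {
    const contents = [];
    const commonPrefixes = new Set();
    for (const key of keys) {
        if (contents.length + commonPrefixes.size >= maxKeys) {
            break; // maxKeys caps Contents + CommonPrefixes combined
        }
        if (!key.startsWith(prefix)) {
            continue; // only list keys under the requested prefix
        }
        const rest = key.slice(prefix.length);
        const idx = delimiter ? rest.indexOf(delimiter) : -1;
        if (idx >= 0) {
            // The key contains the delimiter after the prefix: fold it
            // into a common prefix ending with the delimiter.
            commonPrefixes.add(prefix + rest.slice(0, idx + delimiter.length));
        } else {
            contents.push(key);
        }
    }
    return { contents, commonPrefixes: [...commonPrefixes] };
}

// listKeys(['a/1', 'a/2', 'b'], { delimiter: '/' })
// -> { contents: ['b'], commonPrefixes: ['a/'] }
```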
```diff
@@ -1,45 +0,0 @@
-# DelimiterMaster
-
-The DelimiterMaster class handles raw listings from the database of a
-versioned or non-versioned bucket with an optional delimiter, and
-fills in a curated listing with "Contents" and "CommonPrefixes" as a
-result.
-
-## Expected Behavior
-
-- only lists latest versions of versioned buckets
-
-- only lists keys belonging to the given **prefix** (if provided)
-
-- does not list latest versions that are delete markers
-
-- groups listed keys that have a common prefix ending with a delimiter
-  inside CommonPrefixes
-
-- can take a **marker** or **continuationToken** to list from a specific key
-
-- can take a **maxKeys** parameter to limit how many keys can be returned
-
-- reconciles internal PHD keys with the next version (those are
-  created when a specific version that is the latest version is
-  deleted)
-
-- skips internal keys like replay keys
-
-## State Chart
-
-- States with grey background are *Idle* states, which are waiting for
-  a new listing key
-
-- States with blue background are *Processing* states, which are
-  actively processing a new listing key passed by the filter()
-  function
-
-### Bucket Vformat=v0
-
-![DelimiterMaster State Chart for v0 format](./pics/delimiterMasterV0StateChart.svg)
-
-### Bucket Vformat=v1
-
-For buckets in versioning key format **v1**, the algorithm used is the
-one from [Delimiter](delimiter.md).
```
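To make the v0 behavior concrete, here is a rough sketch of the per-key decision the deleted doc describes. It assumes a `'<masterKey>\0<versionId>'` version-key layout and JSON metadata flags; both the layout and the helper are illustrative stand-ins, not the real DelimiterMaster code:

```javascript
// Illustrative per-key decision for a v0 master listing; assumes
// version keys look like '<masterKey>\0<versionId>' and that values
// are JSON carrying isDeleteMarker / isPHD flags (stand-ins, not the
// actual Arsenal helpers).
const FILTER_ACCEPT = 1;
const FILTER_SKIP = 0;

const VID_SEP = '\0';
const isVersionKey = key => key.includes(VID_SEP);

function filterMasterV0(key, value) {
    if (isVersionKey(key)) {
        // Only the latest (master) version of each key is listed.
        return FILTER_SKIP;
    }
    const md = JSON.parse(value);
    if (md.isDeleteMarker) {
        // A latest version that is a delete marker is not listed;
        // the version keys that follow it are skipped.
        return FILTER_ACCEPT;
    }
    if (md.isPHD) {
        // Placeholder (PHD) master key: the next version key carries
        // the actual latest version to reconcile with.
        return FILTER_ACCEPT;
    }
    // Listable key: append to Contents or fold into CommonPrefixes.
    return FILTER_ACCEPT;
}
```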
```diff
@@ -1,33 +0,0 @@
-# DelimiterVersions
-
-The DelimiterVersions class handles raw listings from the database of a
-versioned or non-versioned bucket with an optional delimiter, and
-fills in a curated listing with "Versions" and "CommonPrefixes" as a
-result.
-
-## Expected Behavior
-
-- lists individual distinct versions of versioned buckets
-
-- only lists keys belonging to the given **prefix** (if provided)
-
-- groups listed keys that have a common prefix ending with a delimiter
-  inside CommonPrefixes
-
-- can take a **keyMarker** and optionally a **versionIdMarker** to
-  list from a specific key or version
-
-- can take a **maxKeys** parameter to limit how many keys can be returned
-
-- skips internal keys like replay keys
-
-## State Chart
-
-- States with grey background are *Idle* states, which are waiting for
-  a new listing key
-
-- States with blue background are *Processing* states, which are
-  actively processing a new listing key passed by the filter()
-  function
-
-![DelimiterVersions State Chart](./pics/delimiterVersionsStateChart.svg)
```
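The keyMarker/versionIdMarker resumption rule described above, and spelled out in the versions state chart further below, boils down to a comparison like this (a hypothetical helper; per the chart, versions of keyMarker are skipped until versionId passes versionIdMarker):

```javascript
// Illustrative marker check for resuming a versions listing
// (hypothetical helper, not the actual DelimiterVersions class).
function isAfterMarker(key, versionId, keyMarker, versionIdMarker) {
    if (key !== keyMarker) {
        return key > keyMarker; // keys are listed in lexicographic order
    }
    // Same key as the marker: resume with the versions that sort
    // after versionIdMarker.
    return versionIdMarker === undefined || versionId > versionIdMarker;
}
```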
```diff
@@ -1,45 +0,0 @@
-digraph {
-    node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
-    edge [fontsize=14];
-    rankdir=TB;
-
-    START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
-    END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]
-
-    node [fillcolor="lightgrey"];
-    "NotSkippingPrefixNorVersions.Idle" [label="NotSkippingPrefixNorVersions",group="NotSkippingPrefixNorVersions",width=4];
-    "SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];
-    "SkippingVersions.Idle" [label="SkippingVersions",group="SkippingVersions"];
-    "WaitVersionAfterPHD.Idle" [label="WaitVersionAfterPHD",group="WaitVersionAfterPHD"];
-
-    node [fillcolor="lightblue"];
-    "NotSkippingPrefixNorVersions.Processing" [label="NotSkippingPrefixNorVersions",group="NotSkippingPrefixNorVersions",width=4];
-    "SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];
-    "SkippingVersions.Processing" [label="SkippingVersions",group="SkippingVersions"];
-    "WaitVersionAfterPHD.Processing" [label="WaitVersionAfterPHD",group="WaitVersionAfterPHD"];
-
-    START -> "SkippingVersions.Idle" [label="[marker != undefined]"]
-    START -> "NotSkippingPrefixNorVersions.Idle" [label="[marker == undefined]"]
-
-    "NotSkippingPrefixNorVersions.Idle" -> "NotSkippingPrefixNorVersions.Processing" [label="filter(key, value)"]
-    "SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]
-    "SkippingVersions.Idle" -> "SkippingVersions.Processing" [label="filter(key, value)"]
-    "WaitVersionAfterPHD.Idle" -> "WaitVersionAfterPHD.Processing" [label="filter(key, value)"]
-
-
-    "NotSkippingPrefixNorVersions.Processing" -> "SkippingVersions.Idle" [label="[Version.isDeleteMarker(value)]\n-> FILTER_ACCEPT"]
-    "NotSkippingPrefixNorVersions.Processing" -> "WaitVersionAfterPHD.Idle" [label="[Version.isPHD(value)]\n-> FILTER_ACCEPT"]
-    "NotSkippingPrefixNorVersions.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(<ReplayPrefix>)]\n/ prefix <- <ReplayPrefix>\n-> FILTER_SKIP"]
-    "NotSkippingPrefixNorVersions.Processing" -> END [label="[isListableKey(key, value) and\nKeys == maxKeys]\n-> FILTER_END"]
-    "NotSkippingPrefixNorVersions.Processing" -> "SkippingPrefix.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nhasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
-    "NotSkippingPrefixNorVersions.Processing" -> "SkippingVersions.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nnot hasDelimiter(key)]\n/ Contents.append(key, value)\n-> FILTER_ACCEPT"]
-
-    "SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
-    "SkippingPrefix.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not key.startsWith(prefix)]"]
-
-    "SkippingVersions.Processing" -> "SkippingVersions.Idle" [label="[isVersionKey(key)]\n-> FILTER_SKIP"]
-    "SkippingVersions.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not isVersionKey(key)]"]
-
-    "WaitVersionAfterPHD.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[isVersionKey(key) and master(key) == PHDkey]\n/ key <- master(key)"]
-    "WaitVersionAfterPHD.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not isVersionKey(key) or master(key) != PHDkey]"]
-}
```
*Deleted: `pics/delimiterMasterV0StateChart.svg`, the generated Graphviz rendering of the v0 state chart above (216 lines, 18 KiB).*
```diff
@@ -1,35 +0,0 @@
-digraph {
-    node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
-    edge [fontsize=14];
-    rankdir=TB;
-
-    START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
-    END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]
-
-    node [fillcolor="lightgrey"];
-    "NotSkipping.Idle" [label="NotSkipping",group="NotSkipping"];
-    "NeverSkipping.Idle" [label="NeverSkipping",group="NeverSkipping"];
-    "NotSkippingPrefix.Idle" [label="NotSkippingPrefix",group="NotSkippingPrefix"];
-    "SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];
-
-    node [fillcolor="lightblue"];
-    "NeverSkipping.Processing" [label="NeverSkipping",group="NeverSkipping"];
-    "NotSkippingPrefix.Processing" [label="NotSkippingPrefix",group="NotSkippingPrefix"];
-    "SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];
-
-    START -> "NotSkipping.Idle"
-    "NotSkipping.Idle" -> "NeverSkipping.Idle" [label="[delimiter == undefined]"]
-    "NotSkipping.Idle" -> "NotSkippingPrefix.Idle" [label="[delimiter == '/']"]
-
-    "NeverSkipping.Idle" -> "NeverSkipping.Processing" [label="filter(key, value)"]
-    "NotSkippingPrefix.Idle" -> "NotSkippingPrefix.Processing" [label="filter(key, value)"]
-    "SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]
-
-    "NeverSkipping.Processing" -> END [label="[nKeys == maxKeys]\n-> FILTER_END"]
-    "NeverSkipping.Processing" -> "NeverSkipping.Idle" [label="[nKeys < maxKeys]\n/ Contents.append(key, value)\n -> FILTER_ACCEPT"]
-    "NotSkippingPrefix.Processing" -> END [label="[nKeys == maxKeys]\n -> FILTER_END"]
-    "NotSkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[nKeys < maxKeys and hasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
-    "NotSkippingPrefix.Processing" -> "NotSkippingPrefix.Idle" [label="[nKeys < maxKeys and not hasDelimiter(key)]\n/ Contents.append(key, value)\n -> FILTER_ACCEPT"]
-    "SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
-    "SkippingPrefix.Processing" -> "NotSkippingPrefix.Processing" [label="[not key.startsWith(prefix)]"]
-}
```
*Deleted: `pics/delimiterStateChart.svg`, the generated Graphviz rendering of the state chart above (166 lines, 12 KiB).*
```diff
@@ -1,50 +0,0 @@
-digraph {
-    node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
-    edge [fontsize=14];
-    rankdir=TB;
-
-    START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
-    END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]
-
-    node [fillcolor="lightgrey"];
-    "NotSkipping.Idle" [label="NotSkipping",group="NotSkipping",width=4];
-    "SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];
-    "WaitForNullKey.Idle" [label="WaitForNullKey",group="WaitForNullKey"];
-    "SkippingVersions.Idle" [label="SkippingVersions",group="SkippingVersions"];
-
-    node [fillcolor="lightblue"];
-    "NotSkipping.Processing" [label="NotSkipping",group="NotSkipping",width=4];
-    "NotSkippingV0.Processing" [label="NotSkippingV0",group="NotSkipping",width=4];
-    "NotSkippingV1.Processing" [label="NotSkippingV1",group="NotSkipping",width=4];
-    "NotSkippingCommon.Processing" [label="NotSkippingCommon",group="NotSkipping",width=4];
-    "SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];
-    "WaitForNullKey.Processing" [label="WaitForNullKey",group="WaitForNullKey"];
-    "SkippingVersions.Processing" [label="SkippingVersions",group="SkippingVersions"];
-
-    START -> "WaitForNullKey.Idle" [label="[versionIdMarker != undefined]"]
-    START -> "NotSkipping.Idle" [label="[versionIdMarker == undefined]"]
-
-    "NotSkipping.Idle" -> "NotSkipping.Processing" [label="filter(key, value)"]
-    "SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]
-    "WaitForNullKey.Idle" -> "WaitForNullKey.Processing" [label="filter(key, value)"]
-    "SkippingVersions.Idle" -> "SkippingVersions.Processing" [label="filter(key, value)"]
-
-    "NotSkipping.Processing" -> "NotSkippingV0.Processing" [label="vFormat='v0'"]
-    "NotSkipping.Processing" -> "NotSkippingV1.Processing" [label="vFormat='v1'"]
-
-    "WaitForNullKey.Processing" -> "NotSkipping.Processing" [label="master(key) != keyMarker"]
-    "WaitForNullKey.Processing" -> "SkippingVersions.Processing" [label="master(key) == keyMarker"]
-    "NotSkippingV0.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(<ReplayPrefix>)]\n/ prefix <- <ReplayPrefix>\n-> FILTER_SKIP"]
-    "NotSkippingV0.Processing" -> "NotSkipping.Idle" [label="[Version.isPHD(value)]\n-> FILTER_ACCEPT"]
-    "NotSkippingV0.Processing" -> "NotSkippingCommon.Processing" [label="[not key.startsWith(<ReplayPrefix>)\nand not Version.isPHD(value)]"]
-    "NotSkippingV1.Processing" -> "NotSkippingCommon.Processing" [label="[always]"]
-    "NotSkippingCommon.Processing" -> END [label="[isListableKey(key, value) and\nKeys == maxKeys]\n-> FILTER_END"]
-    "NotSkippingCommon.Processing" -> "SkippingPrefix.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nhasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
-    "NotSkippingCommon.Processing" -> "NotSkipping.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nnot hasDelimiter(key)]\n/ Contents.append(key, versionId, value)\n-> FILTER_ACCEPT"]
-
-    "SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
-    "SkippingPrefix.Processing" -> "NotSkipping.Processing" [label="[not key.startsWith(prefix)]"]
-    "SkippingVersions.Processing" -> "NotSkipping.Processing" [label="master(key) !== keyMarker or \nversionId > versionIdMarker"]
-    "SkippingVersions.Processing" -> "SkippingVersions.Idle" [label="master(key) === keyMarker and \nversionId < versionIdMarker\n-> FILTER_SKIP"]
-    "SkippingVersions.Processing" -> "SkippingVersions.Idle" [label="master(key) === keyMarker and \nversionId == versionIdMarker\n-> FILTER_ACCEPT"]
-}
```
@ -1,265 +0,0 @@
[Deleted file (21 KiB SVG): the Graphviz rendering of the state diagram defined by the .dot source above, with the same nodes (START, END, NotSkipping, WaitForNullKey, SkippingPrefix, SkippingVersions, NotSkippingV0, NotSkippingV1, NotSkippingCommon, each with Idle/Processing variants) and the same transition labels; nothing beyond the rendered layout is lost.]
index.ts (158 changed lines)
@ -1,138 +1,49 @@
import * as evaluators from './lib/policyEvaluator/evaluator';
import evaluatePrincipal from './lib/policyEvaluator/principal';
import RequestContext, {
    actionNeedQuotaCheck,
    actionNeedQuotaCheckCopy,
    actionWithDataDeletion } from './lib/policyEvaluator/RequestContext';
import * as requestUtils from './lib/policyEvaluator/requestUtils';
import * as actionMaps from './lib/policyEvaluator/utils/actionMaps';
import { validateUserPolicy } from './lib/policy/policyValidator'
import * as locationConstraints from './lib/patches/locationConstraints';
import * as userMetadata from './lib/s3middleware/userMetadata';
import convertToXml from './lib/s3middleware/convertToXml';
import escapeForXml from './lib/s3middleware/escapeForXml';
import * as objectLegalHold from './lib/s3middleware/objectLegalHold';
import * as tagging from './lib/s3middleware/tagging';
import { checkDateModifiedHeaders } from './lib/s3middleware/validateConditionalHeaders';
import { validateConditionalHeaders } from './lib/s3middleware/validateConditionalHeaders';
import MD5Sum from './lib/s3middleware/MD5Sum';
import NullStream from './lib/s3middleware/nullStream';
import * as objectUtils from './lib/s3middleware/objectUtils';
import * as mpuUtils from './lib/s3middleware/azureHelpers/mpuUtils';
import ResultsCollector from './lib/s3middleware/azureHelpers/ResultsCollector';
import SubStreamInterface from './lib/s3middleware/azureHelpers/SubStreamInterface';
import { prepareStream } from './lib/s3middleware/prepareStream';
import * as processMpuParts from './lib/s3middleware/processMpuParts';
import * as retention from './lib/s3middleware/objectRetention';
import * as objectRestore from './lib/s3middleware/objectRestore';
import * as lifecycleHelpers from './lib/s3middleware/lifecycleHelpers';
export { default as errors } from './lib/errors';
export { default as Clustering } from './lib/Clustering';
export * as ClusterRPC from './lib/clustering/ClusterRPC';
export * as ipCheck from './lib/ipCheck';
export * as auth from './lib/auth/auth';
export * as constants from './lib/constants';
export * as https from './lib/https';
export * as metrics from './lib/metrics';
export * as network from './lib/network';
export * as s3routes from './lib/s3routes';
export * as versioning from './lib/versioning';
export * as stream from './lib/stream';
export * as jsutil from './lib/jsutil';
export { default as stringHash } from './lib/stringHash';
export * as db from './lib/db';
export { default as errors } from './lib/errors';
export * as errorUtils from './lib/errorUtils';
export { default as shuffle } from './lib/shuffle';
export { default as stringHash } from './lib/stringHash';
export * as ipCheck from './lib/ipCheck';
export * as jsutil from './lib/jsutil';
export * as https from './lib/https';
export { default as Clustering } from './lib/Clustering';
export * as algorithms from './lib/algos';
export * as policies from './lib/policyEvaluator';
export * as testing from './lib/testing';
export * as versioning from './lib/versioning';
export * as network from './lib/network';
export * as s3routes from './lib/s3routes';
export * as s3middleware from './lib/s3middleware';
export * as models from './lib/models';
export * as metrics from './lib/metrics';
export * as stream from './lib/stream';

export const algorithms = {
    list: require('./lib/algos/list/exportAlgos'),
    listTools: {
        DelimiterTools: require('./lib/algos/list/tools'),
        Skip: require('./lib/algos/list/skip'),
    },
    cache: {
        GapSet: require('./lib/algos/cache/GapSet'),
        GapCache: require('./lib/algos/cache/GapCache'),
        LRUCache: require('./lib/algos/cache/LRUCache'),
    },
    stream: {
        MergeStream: require('./lib/algos/stream/MergeStream'),
    },
    SortedSet: require('./lib/algos/set/SortedSet'),
    Heap: require('./lib/algos/heap/Heap'),
};

export const policies = {
    evaluators,
    validateUserPolicy,
    evaluatePrincipal,
    RequestContext,
    requestUtils,
    actionMaps,
    actionNeedQuotaCheck,
    actionWithDataDeletion,
    actionNeedQuotaCheckCopy,
};

export const testing = {
    matrix: require('./lib/testing/matrix.js'),
};

export const s3middleware = {
    userMetadata,
    convertToXml,
    escapeForXml,
    objectLegalHold,
    tagging,
    checkDateModifiedHeaders,
    validateConditionalHeaders,
    MD5Sum,
    NullStream,
    objectUtils,
    azureHelper: {
        mpuUtils,
        ResultsCollector,
        SubStreamInterface,
    },
    prepareStream,
    processMpuParts,
    retention,
    objectRestore,
    lifecycleHelpers,
export const pensieve = {
    credentialUtils: require('./lib/executables/pensieveCreds/utils'),
};

export const storage = {
    metadata: {
        MetadataWrapper: require('./lib/storage/metadata/MetadataWrapper'),
        bucketclient: {
            BucketClientInterface:
                require('./lib/storage/metadata/bucketclient/' +
                    'BucketClientInterface'),
            LogConsumer:
                require('./lib/storage/metadata/bucketclient/LogConsumer'),
            BucketClientInterface: require('./lib/storage/metadata/bucketclient/BucketClientInterface'),
            LogConsumer: require('./lib/storage/metadata/bucketclient/LogConsumer'),
        },
        file: {
            BucketFileInterface:
                require('./lib/storage/metadata/file/BucketFileInterface'),
            MetadataFileServer:
                require('./lib/storage/metadata/file/MetadataFileServer'),
            MetadataFileClient:
                require('./lib/storage/metadata/file/MetadataFileClient'),
            BucketFileInterface: require('./lib/storage/metadata/file/BucketFileInterface'),
            MetadataFileServer: require('./lib/storage/metadata/file/MetadataFileServer'),
            MetadataFileClient: require('./lib/storage/metadata/file/MetadataFileClient'),
        },
        inMemory: {
            metastore:
                require('./lib/storage/metadata/in_memory/metastore'),
            metastore: require('./lib/storage/metadata/in_memory/metastore'),
            metadata: require('./lib/storage/metadata/in_memory/metadata'),
            bucketUtilities:
                require('./lib/storage/metadata/in_memory/bucket_utilities'),
            bucketUtilities: require('./lib/storage/metadata/in_memory/bucket_utilities'),
        },
        mongoclient: {
            MongoClientInterface:
                require('./lib/storage/metadata/mongoclient/' +
                    'MongoClientInterface'),
            LogConsumer:
                require('./lib/storage/metadata/mongoclient/LogConsumer'),
            MongoClientInterface: require('./lib/storage/metadata/mongoclient/MongoClientInterface'),
            LogConsumer: require('./lib/storage/metadata/mongoclient/LogConsumer'),
        },
        proxy: {
            Server: require('./lib/storage/metadata/proxy/Server'),

@ -140,14 +51,11 @@ export const storage = {
    },
    data: {
        DataWrapper: require('./lib/storage/data/DataWrapper'),
        MultipleBackendGateway:
            require('./lib/storage/data/MultipleBackendGateway'),
        MultipleBackendGateway: require('./lib/storage/data/MultipleBackendGateway'),
        parseLC: require('./lib/storage/data/LocationConstraintParser'),
        file: {
            DataFileStore:
                require('./lib/storage/data/file/DataFileStore'),
            DataFileInterface:
                require('./lib/storage/data/file/DataFileInterface'),
            DataFileStore: require('./lib/storage/data/file/DataFileStore'),
            DataFileInterface: require('./lib/storage/data/file/DataFileInterface'),
        },
        external: {
            AwsClient: require('./lib/storage/data/external/AwsClient'),

@ -165,11 +73,3 @@ export const storage = {
    },
    utils: require('./lib/storage/utils'),
};

export const pensieve = {
    credentialUtils: require('./lib/executables/pensieveCreds/utils'),
};

export const patches = {
    locationConstraints,
};
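Net effect for consumers of the package: the grouped objects above ('algorithms', 'policies', 's3middleware', and so on) are built with require() calls again rather than ES module re-exports, but the exposed property paths stay exactly as laid out in the object literals. A minimal consumer-side sketch (the package name 'arsenal' is an assumption, it does not appear in this diff):

// Hypothetical consumer code; property paths follow the literals above.
import * as arsenal from 'arsenal';

const listAlgos = arsenal.algorithms.list;            // ./lib/algos/list/exportAlgos
const { matrix } = arsenal.testing;                   // ./lib/testing/matrix.js
const escaped = arsenal.s3middleware.escapeForXml('a&b');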
@ -1,28 +1,16 @@
import cluster, { Worker } from 'cluster';
import * as werelogs from 'werelogs';
import * as cluster from 'cluster';

export default class Clustering {
    _size: number;
    _shutdownTimeout: number;
    _logger: werelogs.Logger;
    _shutdown: boolean;
    _workers: (Worker | undefined)[];
    _workersTimeout: (NodeJS.Timeout | undefined)[];
    _workersStatus: (number | string | undefined)[];
    _status: number;
    _exitCb?: (clustering: Clustering, exitSignal?: string) => void;
    _index?: number;

    /**
     * Constructor
     *
     * @param size Cluster size
     * @param logger Logger object
     * @param [shutdownTimeout=5000] Change default shutdown timeout
     * @param {number} size Cluster size
     * @param {Logger} logger Logger object
     * @param {number} [shutdownTimeout=5000] Change default shutdown timeout
     * releasing resources
     * @return itself
     * @return {Clustering} itself
     */
    constructor(size: number, logger: werelogs.Logger, shutdownTimeout?: number) {
    constructor(size, logger, shutdownTimeout) {
        this._size = size;
        if (size < 1) {
            throw new Error('Cluster size must be greater than or equal to 1');

@ -42,6 +30,7 @@ export default class Clustering {
     * Method called after a stop() call
     *
     * @private
     * @return {undefined}
     */
    _afterStop() {
        // Assuming all workers shutdown gracefully

@ -50,11 +39,10 @@ export default class Clustering {
        for (let i = 0; i < size; ++i) {
            // If the process returned an error code or was killed by a signal,
            // set the status
            const status = this._workersStatus[i];
            if (typeof status === 'number') {
                this._status = status;
            if (typeof this._workersStatus[i] === 'number') {
                this._status = this._workersStatus[i];
                break;
            } else if (typeof status === 'string') {
            } else if (typeof this._workersStatus[i] === 'string') {
                this._status = 1;
                break;
            }

@ -68,17 +56,13 @@ export default class Clustering {
    /**
     * Method called when a worker exited
     *
     * @param worker - Current worker
     * @param i - Worker index
     * @param code - Exit code
     * @param signal - Exit signal
     * @param {Cluster.worker} worker - Current worker
     * @param {number} i - Worker index
     * @param {number} code - Exit code
     * @param {string} signal - Exit signal
     * @return {undefined}
     */
    _workerExited(
        worker: Worker,
        i: number,
        code: number,
        signal: string,
    ) {
    _workerExited(worker, i, code, signal) {
        // If the worker:
        // - was killed by a signal
        // - returned an error code

@ -105,9 +89,8 @@ export default class Clustering {
            this._workersStatus[i] = undefined;
        }
        this._workers[i] = undefined;
        const timeout = this._workersTimeout[i];
        if (timeout) {
            clearTimeout(timeout);
        if (this._workersTimeout[i]) {
            clearTimeout(this._workersTimeout[i]);
            this._workersTimeout[i] = undefined;
        }
        // If we don't trigger the stop method, the watchdog

@ -125,28 +108,29 @@ export default class Clustering {
    /**
     * Method to start a worker
     *
     * @param i Index of the starting worker
     * @param {number} i Index of the starting worker
     * @return {undefined}
     */
    startWorker(i: number) {
        if (!cluster.isPrimary) {
    startWorker(i) {
        if (!cluster.isMaster) {
            return;
        }
        // Fork a new worker
        this._workers[i] = cluster.fork();
        // Listen for message from the worker
        this._workers[i]!.on('message', msg => {
        this._workers[i].on('message', msg => {
            // If the worker is ready, send it its id
            if (msg === 'ready') {
                this._workers[i]!.send({ msg: 'setup', id: i });
                this._workers[i].send({ msg: 'setup', id: i });
            }
        });
        this._workers[i]!.on('exit', (code, signal) =>
            this._workerExited(this._workers[i]!, i, code, signal));
        this._workers[i].on('exit', (code, signal) =>
            this._workerExited(this._workers[i], i, code, signal));
        // Trigger when the worker was started
        this._workers[i]!.on('online', () => {
        this._workers[i].on('online', () => {
            this._logger.info('Worker started', {
                id: i,
                childPid: this._workers[i]!.process.pid,
                childPid: this._workers[i].process.pid,
            });
        });
    }

@ -154,10 +138,10 @@ export default class Clustering {
    /**
     * Method to put handler on cluster exit
     *
     * @param cb - Callback(Clustering, [exitSignal])
     * @return Itself
     * @param {function} cb - Callback(Clustering, [exitSignal])
     * @return {Clustering} Itself
     */
    onExit(cb: (clustering: Clustering, exitSignal?: string) => void) {
    onExit(cb) {
        this._exitCb = cb;
        return this;
    }

@ -166,33 +150,33 @@ export default class Clustering {
     * Method to start the cluster (if master) or to start the callback
     * (worker)
     *
     * @param cb - Callback to run the worker
     * @return itself
     * @param {function} cb - Callback to run the worker
     * @return {Clustering} itself
     */
    start(cb: (clustering: Clustering) => void) {
    start(cb) {
        process.on('SIGINT', () => this.stop('SIGINT'));
        process.on('SIGHUP', () => this.stop('SIGHUP'));
        process.on('SIGQUIT', () => this.stop('SIGQUIT'));
        process.on('SIGTERM', () => this.stop('SIGTERM'));
        process.on('SIGPIPE', () => {});
        process.on('exit', (code?: number, signal?: string) => {
        process.on('exit', (code, signal) => {
            if (this._exitCb) {
                this._status = code || 0;
                return this._exitCb(this, signal);
            }
            return process.exit(code || 0);
        });
        process.on('uncaughtException', (err: Error) => {
        process.on('uncaughtException', err => {
            this._logger.fatal('caught error', {
                error: err.message,
                stack: err.stack?.split('\n')?.map(str => str.trim()),
                stack: err.stack.split('\n').map(str => str.trim()),
            });
            process.exit(1);
        });
        if (!cluster.isPrimary) {
        if (!cluster.isMaster) {
            // Waiting for message from master to
            // know the id of the slave cluster
            process.on('message', (msg: any) => {
            process.on('message', msg => {
                if (msg.msg === 'setup') {
                    this._index = msg.id;
                    cb(this);

@ -200,7 +184,7 @@ export default class Clustering {
            });
            // Send message to the master, to let it know
            // the worker has started
            process.send?.('ready');
            process.send('ready');
        } else {
            for (let i = 0; i < this._size; ++i) {
                this.startWorker(i);

@ -212,7 +196,7 @@ export default class Clustering {
    /**
     * Method to get workers
     *
     * @return Workers
     * @return {Cluster.Worker[]} Workers
     */
    getWorkers() {
        return this._workers;

@ -221,7 +205,7 @@ export default class Clustering {
    /**
     * Method to get the status of the cluster
     *
     * @return Status code
     * @return {number} Status code
     */
    getStatus() {
        return this._status;

@ -230,7 +214,7 @@ export default class Clustering {
    /**
     * Method to return if it's the master process
     *
     * @return - True if master, false otherwise
     * @return {boolean} - True if master, false otherwise
     */
    isMaster() {
        return this._index === undefined;

@ -239,7 +223,7 @@ export default class Clustering {
    /**
     * Method to get index of the worker
     *
     * @return Worker index, undefined if it's master
     * @return {number|undefined} Worker index, undefined if it's master
     */
    getIndex() {
        return this._index;

@ -248,10 +232,11 @@ export default class Clustering {
    /**
     * Method to stop the cluster
     *
     * @param signal - Set internally when processes killed by signal
     * @param {string} signal - Set internally when processes killed by signal
     * @return {undefined}
     */
    stop(signal?: string) {
        if (!cluster.isPrimary) {
    stop(signal) {
        if (!cluster.isMaster) {
            if (this._exitCb) {
                return this._exitCb(this, signal);
            }

@ -264,17 +249,11 @@ export default class Clustering {
            }
            this._workersTimeout[i] = setTimeout(() => {
                // Kill the worker if the SIGTERM was ignored or takes too long
                if (worker.process.pid) {
                    process.kill(worker.process.pid, 'SIGKILL');
                }
            }, this._shutdownTimeout);
            // Send SIGTERM to the process, allowing it to release resources
            // and save some state
            if (worker.process.pid) {
                return process.kill(worker.process.pid, 'SIGTERM');
            } else {
                return true;
            }
        });
    }
}
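For orientation, a minimal usage sketch of the reverted Clustering API, using only the methods visible in this diff; the logger construction and import paths are assumptions:

import * as werelogs from 'werelogs';
import Clustering from './lib/Clustering';   // path assumed

const logger = new werelogs.Logger('ClusteringDemo');
const clustering = new Clustering(4, logger, 5000 /* shutdownTimeout, ms */);
clustering
    .onExit(c => process.exit(c.getStatus()))   // runs when the process exits
    .start(c => {
        // worker-side callback, invoked once the 'setup' message arrives
        logger.info('worker running', { index: c.getIndex() });
    });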
@ -1,363 +0,0 @@
import { OrderedSet } from '@js-sdsl/ordered-set';
import {
    default as GapSet,
    GapSetEntry,
} from './GapSet';

// the API is similar but is not strictly a superset of GapSetInterface
// so we don't extend from it
export interface GapCacheInterface {
    exposureDelayMs: number;
    maxGapWeight: number;
    size: number;

    setGap: (firstKey: string, lastKey: string, weight: number) => void;
    removeOverlappingGaps: (overlappingKeys: string[]) => number;
    lookupGap: (minKey: string, maxKey?: string) => Promise<GapSetEntry | null>;
    [Symbol.iterator]: () => Iterator<GapSetEntry>;
    toArray: () => GapSetEntry[];
};

class GapCacheUpdateSet {
    newGaps: GapSet;
    updatedKeys: OrderedSet<string>;

    constructor(maxGapWeight: number) {
        this.newGaps = new GapSet(maxGapWeight);
        this.updatedKeys = new OrderedSet();
    }

    addUpdateBatch(updatedKeys: OrderedSet<string>): void {
        this.updatedKeys.union(updatedKeys);
    }
};

/**
 * Cache of listing "gaps", i.e. ranges of keys that can be skipped
 * over during listing (because they only contain delete markers as
 * latest versions).
 *
 * Typically, a single GapCache instance would be attached to a raft session.
 *
 * The API usage is as follows:
 *
 * - Initialize a GapCache instance by calling start() (this starts an internal timer)
 *
 * - Insert a gap or update an existing one via setGap()
 *
 * - Lookup existing gaps via lookupGap()
 *
 * - Invalidate gaps that overlap a specific set of keys via removeOverlappingGaps()
 *
 * - Shut down a GapCache instance by calling stop() (this stops the internal timer)
 *
 * Gaps inserted via setGap() are not exposed immediately to lookupGap(), but only:
 *
 * - after a certain delay always larger than 'exposureDelayMs' and usually shorter
 *   than twice this value (but might be slightly longer in rare cases)
 *
 * - and only if they haven't been invalidated by a recent call to removeOverlappingGaps()
 *
 * This ensures atomicity between gap creation and invalidation from updates under
 * the condition that a gap is created from first key to last key within the time defined
 * by 'exposureDelayMs'.
 *
 * The implementation is based on two extra temporary "update sets" on top of the main
 * exposed gap set, one called "staging" and the other "frozen", each containing a
 * temporary updated gap set and a list of updated keys to invalidate gaps with (coming
 * from calls to removeOverlappingGaps()). Every "exposureDelayMs" milliseconds, the frozen
 * gaps are invalidated by all key updates coming from either of the "staging" or "frozen"
 * update set, then merged into the exposed gaps set, after which the staging updates become
 * the frozen updates and won't receive any new gap until the next cycle.
 */
export default class GapCache implements GapCacheInterface {
    _exposureDelayMs: number;
    maxGaps: number;

    _stagingUpdates: GapCacheUpdateSet;
    _frozenUpdates: GapCacheUpdateSet;
    _exposedGaps: GapSet;
    _exposeFrozenInterval: NodeJS.Timeout | null;

    /**
     * @constructor
     *
     * @param {number} exposureDelayMs - minimum delay between
     *   insertion of a gap via setGap() and its exposure via
     *   lookupGap()
     * @param {number} maxGaps - maximum number of cached gaps, after
     *   which no new gap can be added by setGap(). (Note: a future
     *   improvement could replace this by an eviction strategy)
     * @param {number} maxGapWeight - maximum "weight" of individual
     *   cached gaps, which is also the granularity for
     *   invalidation. Individual gaps can be chained together,
     *   which lookupGap() transparently consolidates in the response
     *   into a single large gap.
     */
    constructor(exposureDelayMs: number, maxGaps: number, maxGapWeight: number) {
        this._exposureDelayMs = exposureDelayMs;
        this.maxGaps = maxGaps;

        this._stagingUpdates = new GapCacheUpdateSet(maxGapWeight);
        this._frozenUpdates = new GapCacheUpdateSet(maxGapWeight);
        this._exposedGaps = new GapSet(maxGapWeight);
        this._exposeFrozenInterval = null;
    }

    /**
     * Create a GapCache from an array of exposed gap entries (used in tests)
     *
     * @return {GapCache} - a new GapCache instance
     */
    static createFromArray(
        gaps: GapSetEntry[],
        exposureDelayMs: number,
        maxGaps: number,
        maxGapWeight: number
    ): GapCache {
        const gapCache = new GapCache(exposureDelayMs, maxGaps, maxGapWeight);
        gapCache._exposedGaps = GapSet.createFromArray(gaps, maxGapWeight)
        return gapCache;
    }

    /**
     * Internal helper to remove gaps in the staging and frozen sets
     * overlapping with previously updated keys, right before the
     * frozen gaps get exposed.
     *
     * @return {undefined}
     */
    _removeOverlappingGapsBeforeExpose(): void {
        for (const { updatedKeys } of [this._stagingUpdates, this._frozenUpdates]) {
            if (updatedKeys.size() === 0) {
                continue;
            }
            for (const { newGaps } of [this._stagingUpdates, this._frozenUpdates]) {
                if (newGaps.size === 0) {
                    continue;
                }
                newGaps.removeOverlappingGaps(updatedKeys);
            }
        }
    }

    /**
     * This function is the core mechanism that updates the exposed gaps in the
     * cache. It is called on a regular interval defined by 'exposureDelayMs'.
     *
     * It does the following in order:
     *
     * - remove gaps from the frozen set that overlap with any key present in a
     *   batch passed to removeOverlappingGaps() since the last two triggers of
     *   _exposeFrozen()
     *
     * - merge the remaining gaps from the frozen set to the exposed set, which
     *   makes them visible from calls to lookupGap()
     *
     * - rotate by freezing the currently staging updates and initiating a new
     *   staging updates set
     *
     * @return {undefined}
     */
    _exposeFrozen(): void {
        this._removeOverlappingGapsBeforeExpose();
        for (const gap of this._frozenUpdates.newGaps) {
            // Use a trivial strategy to keep the cache size within
            // limits: refuse to add new gaps when the size is above
            // the 'maxGaps' threshold. We solely rely on
            // removeOverlappingGaps() to make space for new gaps.
            if (this._exposedGaps.size < this.maxGaps) {
                this._exposedGaps.setGap(gap.firstKey, gap.lastKey, gap.weight);
            }
        }
        this._frozenUpdates = this._stagingUpdates;
        this._stagingUpdates = new GapCacheUpdateSet(this.maxGapWeight);
    }

    /**
     * Start the internal GapCache timer
     *
     * @return {undefined}
     */
    start(): void {
        if (this._exposeFrozenInterval) {
            return;
        }
        this._exposeFrozenInterval = setInterval(
            () => this._exposeFrozen(),
            this._exposureDelayMs);
    }

    /**
     * Stop the internal GapCache timer
     *
     * @return {undefined}
     */
    stop(): void {
        if (this._exposeFrozenInterval) {
            clearInterval(this._exposeFrozenInterval);
            this._exposeFrozenInterval = null;
        }
    }

    /**
     * Record a gap between two keys, associated with a weight to
     * limit individual gap's spanning ranges in the cache, for a more
     * granular invalidation.
     *
     * The function handles splitting and merging existing gaps to
     * maintain an optimal weight of cache entries.
     *
     * NOTE 1: the caller must ensure that the full length of the gap
     * between 'firstKey' and 'lastKey' has been built from a listing
     * snapshot that is more recent than 'exposureDelayMs' milliseconds,
     * in order to guarantee that the exposed gap will be fully
     * covered (and potentially invalidated) from recent calls to
     * removeOverlappingGaps().
     *
     * NOTE 2: a usual pattern when building a large gap from multiple
     * calls to setGap() is to start the next gap from 'lastKey',
     * which will be passed as 'firstKey' in the next call, so that
     * gaps can be chained together and consolidated by lookupGap().
     *
     * @param {string} firstKey - first key of the gap
     * @param {string} lastKey - last key of the gap, must be greater
     *   than or equal to 'firstKey'
     * @param {number} weight - total weight between 'firstKey' and 'lastKey'
     * @return {undefined}
     */
    setGap(firstKey: string, lastKey: string, weight: number): void {
        this._stagingUpdates.newGaps.setGap(firstKey, lastKey, weight);
    }

    /**
     * Remove gaps that overlap with a given set of keys. Used to
     * invalidate gaps when keys are inserted or deleted.
     *
     * @param {OrderedSet<string> | string[]} overlappingKeys - remove gaps that
     *   overlap with any of this set of keys
     * @return {number} - how many gaps were removed from the exposed
     *   gaps only (overlapping gaps not yet exposed are also invalidated
     *   but are not accounted for in the returned value)
     */
    removeOverlappingGaps(overlappingKeys: OrderedSet<string> | string[]): number {
        let overlappingKeysSet;
        if (Array.isArray(overlappingKeys)) {
            overlappingKeysSet = new OrderedSet(overlappingKeys);
        } else {
            overlappingKeysSet = overlappingKeys;
        }
        this._stagingUpdates.addUpdateBatch(overlappingKeysSet);
        return this._exposedGaps.removeOverlappingGaps(overlappingKeysSet);
    }

    /**
     * Lookup the next exposed gap that overlaps with [minKey, maxKey]. Internally
     * chained gaps are coalesced in the response into a single contiguous large gap.
     *
     * @param {string} minKey - minimum key overlapping with the returned gap
     * @param {string} [maxKey] - maximum key overlapping with the returned gap
     * @return {Promise<GapSetEntry | null>} - result of the lookup if a gap
     *   was found, null otherwise, as a Promise
     */
    lookupGap(minKey: string, maxKey?: string): Promise<GapSetEntry | null> {
        return this._exposedGaps.lookupGap(minKey, maxKey);
    }

    /**
     * Get the maximum weight setting for individual gaps.
     *
     * @return {number} - maximum weight of individual gaps
     */
    get maxGapWeight(): number {
        return this._exposedGaps.maxWeight;
    }

    /**
     * Set the maximum weight setting for individual gaps.
     *
     * @param {number} gapWeight - maximum weight of individual gaps
     */
    set maxGapWeight(gapWeight: number) {
        this._exposedGaps.maxWeight = gapWeight;
        // also update transient gap sets
        this._stagingUpdates.newGaps.maxWeight = gapWeight;
        this._frozenUpdates.newGaps.maxWeight = gapWeight;
    }

    /**
     * Get the exposure delay in milliseconds, which is the minimum
     * time after which newly cached gaps will be exposed by
     * lookupGap().
     *
     * @return {number} - exposure delay in milliseconds
     */
    get exposureDelayMs(): number {
        return this._exposureDelayMs;
    }

    /**
     * Set the exposure delay in milliseconds, which is the minimum
     * time after which newly cached gaps will be exposed by
     * lookupGap(). Setting this attribute automatically updates the
     * internal state to honor the new value.
     *
     * @param {number} - exposure delay in milliseconds
     */
    set exposureDelayMs(exposureDelayMs: number) {
        if (exposureDelayMs !== this._exposureDelayMs) {
            this._exposureDelayMs = exposureDelayMs;
            if (this._exposeFrozenInterval) {
                // invalidate all pending gap updates, as the new interval may not be
                // safe for them
                this._stagingUpdates = new GapCacheUpdateSet(this.maxGapWeight);
                this._frozenUpdates = new GapCacheUpdateSet(this.maxGapWeight);

                // reinitialize the _exposeFrozenInterval timer with the updated delay
                this.stop();
                this.start();
            }
        }
    }

    /**
     * Get the number of exposed gaps
     *
     * @return {number} number of exposed gaps
     */
    get size(): number {
        return this._exposedGaps.size;
    }

    /**
     * Iterate over exposed gaps
     *
     * @return {Iterator<GapSetEntry>} an iterator over exposed gaps
     */
    [Symbol.iterator](): Iterator<GapSetEntry> {
        return this._exposedGaps[Symbol.iterator]();
    }

    /**
     * Get an array of all exposed gaps
     *
     * @return {GapSetEntry[]} array of exposed gaps
     */
    toArray(): GapSetEntry[] {
        return this._exposedGaps.toArray();
    }

    /**
     * Clear all exposed and staging gaps from the cache.
     *
     * Note: retains invalidating updates from removeOverlappingGaps()
     * for correctness of gaps inserted afterwards.
     *
     * @return {undefined}
     */
    clear(): void {
        this._stagingUpdates.newGaps = new GapSet(this.maxGapWeight);
        this._frozenUpdates.newGaps = new GapSet(this.maxGapWeight);
        this._exposedGaps = new GapSet(this.maxGapWeight);
    }
}
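To make the removed API concrete, here is a short usage sketch following the docstrings above (key names, weights, and delays are illustrative; the import path mirrors where the deleted file lived):

import GapCache from './lib/algos/cache/GapCache';

async function demo(): Promise<void> {
    // expose gaps after >= 100 ms; hold at most 1000 gaps of weight <= 500 each
    const cache = new GapCache(100, 1000, 500);
    cache.start();                               // starts the exposure timer
    cache.setGap('obj-0001', 'obj-0420', 420);   // record a skippable range
    cache.removeOverlappingGaps(['obj-0123']);   // a key update invalidates it
    // null here: the staged gap overlapped 'obj-0123', so it never gets exposed
    const gap = await cache.lookupGap('obj-0002');
    cache.stop();
}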
@ -1,366 +0,0 @@
import assert from 'assert';
import { OrderedSet } from '@js-sdsl/ordered-set';

import errors from '../../errors';

export type GapSetEntry = {
    firstKey: string,
    lastKey: string,
    weight: number,
};

export interface GapSetInterface {
    maxWeight: number;
    size: number;

    setGap: (firstKey: string, lastKey: string, weight: number) => GapSetEntry;
    removeOverlappingGaps: (overlappingKeys: string[]) => number;
    lookupGap: (minKey: string, maxKey?: string) => Promise<GapSetEntry | null>;
    [Symbol.iterator]: () => Iterator<GapSetEntry>;
    toArray: () => GapSetEntry[];
};

/**
 * Specialized data structure to support caching of listing "gaps",
 * i.e. ranges of keys that can be skipped over during listing
 * (because they only contain delete markers as latest versions)
 */
export default class GapSet implements GapSetInterface, Iterable<GapSetEntry> {
    _gaps: OrderedSet<GapSetEntry>;
    _maxWeight: number;

    /**
     * @constructor
     *
     * @param {number} maxWeight - weight threshold for each cached
     *   gap (unitless). Triggers splitting gaps when reached
     */
    constructor(maxWeight: number) {
        this._gaps = new OrderedSet(
            [],
            (left: GapSetEntry, right: GapSetEntry) => (
                left.firstKey < right.firstKey ? -1 :
                    left.firstKey > right.firstKey ? 1 : 0
            )
        );
        this._maxWeight = maxWeight;
    }

    /**
     * Create a GapSet from an array of gap entries (used in tests)
     */
    static createFromArray(gaps: GapSetEntry[], maxWeight: number): GapSet {
        const gapSet = new GapSet(maxWeight);
        for (const gap of gaps) {
            gapSet._gaps.insert(gap);
        }
        return gapSet;
    }

    /**
     * Record a gap between two keys, associated with a weight to limit
     * individual gap sizes in the cache.
     *
     * The function handles splitting and merging existing gaps to
     * maintain an optimal weight of cache entries.
     *
     * @param {string} firstKey - first key of the gap
     * @param {string} lastKey - last key of the gap, must be greater
     *   than or equal to 'firstKey'
     * @param {number} weight - total weight between 'firstKey' and 'lastKey'
     * @return {GapSetEntry} - existing or new gap entry
     */
    setGap(firstKey: string, lastKey: string, weight: number): GapSetEntry {
        assert(lastKey >= firstKey);

        // Step 1/4: Find the closest left-overlapping gap, and either re-use it
        // or chain it with a new gap depending on the weights if it exists (otherwise
        // just creates a new gap).
        const curGapIt = this._gaps.reverseLowerBound(<GapSetEntry>{ firstKey });
        let curGap;
        if (curGapIt.isAccessible()) {
            curGap = curGapIt.pointer;
            if (curGap.lastKey >= lastKey) {
                // return fully overlapping gap already cached
                return curGap;
            }
        }
        let remainingWeight = weight;
        if (!curGap // no previous gap
            || curGap.lastKey < firstKey // previous gap not overlapping
            || (curGap.lastKey === firstKey // previous gap overlapping by one key...
                && curGap.weight + weight > this._maxWeight) // ...but we can't extend it
        ) {
            // create a new gap indexed by 'firstKey'
            curGap = { firstKey, lastKey: firstKey, weight: 0 };
            this._gaps.insert(curGap);
        } else if (curGap.lastKey > firstKey && weight > this._maxWeight) {
            // previous gap is either fully or partially contained in the new gap
            // and cannot be extended: subtract its weight from the total (heuristic
            // in case the previous gap doesn't start at 'firstKey', which is the
            // uncommon case)
            remainingWeight -= curGap.weight;

            // there may be an existing chained gap starting with the previous gap's
            // 'lastKey': use it if it exists
            const chainedGapIt = this._gaps.find(<GapSetEntry>{ firstKey: curGap.lastKey });
            if (chainedGapIt.isAccessible()) {
                curGap = chainedGapIt.pointer;
            } else {
                // no existing chained gap: chain a new gap to the previous gap
                curGap = {
                    firstKey: curGap.lastKey,
                    lastKey: curGap.lastKey,
                    weight: 0,
                };
                this._gaps.insert(curGap);
            }
        }
        // Step 2/4: Cleanup existing gaps fully included in firstKey -> lastKey, and
        // aggregate their weights in curGap to define the minimum weight up to the
        // last merged gap.
        let nextGap;
        while (true) {
            const nextGapIt = this._gaps.upperBound(<GapSetEntry>{ firstKey: curGap.firstKey });
            nextGap = nextGapIt.isAccessible() && nextGapIt.pointer;
            // stop the cleanup when there is no more gap or if the next gap is
            // not fully included in curGap
            if (!nextGap || nextGap.lastKey > lastKey) {
                break;
            }
            this._gaps.eraseElementByIterator(nextGapIt);
            curGap.lastKey = nextGap.lastKey;
            curGap.weight += nextGap.weight;
        }

        // Step 3/4: Extend curGap to lastKey, adjusting the weight.
        // At this point, curGap weight is the minimum weight of the finished gap, save it
        // for step 4.
        let minMergedWeight = curGap.weight;
        if (curGap.lastKey === firstKey && firstKey !== lastKey) {
            // extend the existing gap by the full amount 'firstKey -> lastKey'
            curGap.lastKey = lastKey;
            curGap.weight += remainingWeight;
        } else if (curGap.lastKey <= lastKey) {
            curGap.lastKey = lastKey;
            curGap.weight = remainingWeight;
        }

        // Step 4/4: Find the closest right-overlapping gap, and if it exists, either merge
        // it or chain it with curGap depending on the weights.
        if (nextGap && nextGap.firstKey <= lastKey) {
            // nextGap overlaps with the new gap: check if we can merge it
            minMergedWeight += nextGap.weight;
            let mergedWeight;
            if (lastKey === nextGap.firstKey) {
                // nextGap is chained with curGap: add the full weight of nextGap
                mergedWeight = curGap.weight + nextGap.weight;
            } else {
                // strict overlap: don't add nextGap's weight unless
                // it's larger than the sum of merged ranges (as it is
                // then included in `minMergedWeight`)
                mergedWeight = Math.max(curGap.weight, minMergedWeight);
            }
            if (mergedWeight <= this._maxWeight) {
                // merge nextGap into curGap
                curGap.lastKey = nextGap.lastKey;
                curGap.weight = mergedWeight;
                this._gaps.eraseElementByKey(nextGap);
            } else {
                // adjust the last key to chain with nextGap and subtract the next
                // gap's weight from curGap (heuristic)
                curGap.lastKey = nextGap.firstKey;
                curGap.weight = Math.max(mergedWeight - nextGap.weight, 0);
                curGap = nextGap;
            }
        }
        // return a copy of curGap
        return Object.assign({}, curGap);
    }

    /**
     * Remove gaps that overlap with one or more keys in a given array or
     * OrderedSet. Used to invalidate gaps when keys are inserted or deleted.
     *
     * @param {OrderedSet<string> | string[]} overlappingKeys - remove gaps that overlap
     *   with any of this set of keys
     * @return {number} - how many gaps were removed
     */
    removeOverlappingGaps(overlappingKeys: OrderedSet<string> | string[]): number {
        // To optimize processing with a large number of keys and/or gaps, this function:
        //
        // 1. converts the overlappingKeys array to an OrderedSet (if not already an OrderedSet)
        // 2. queries both the gaps set and the overlapping keys set in a loop, which allows:
        //    - skipping ranges of overlapping keys at once when there is no new overlapping gap
        //    - skipping ranges of gaps at once when there is no overlapping key
        //
        // This way, it is efficient when the number of non-overlapping gaps is large
        // (which is the most common case in practice).

        let overlappingKeysSet;
        if (Array.isArray(overlappingKeys)) {
            overlappingKeysSet = new OrderedSet(overlappingKeys);
        } else {
            overlappingKeysSet = overlappingKeys;
        }
        const firstKeyIt = overlappingKeysSet.begin();
        let currentKey = firstKeyIt.isAccessible() && firstKeyIt.pointer;
        let nRemoved = 0;
        while (currentKey) {
            const closestGapIt = this._gaps.reverseUpperBound(<GapSetEntry>{ firstKey: currentKey });
            if (closestGapIt.isAccessible()) {
                const closestGap = closestGapIt.pointer;
|
||||
if (currentKey <= closestGap.lastKey) {
|
||||
// currentKey overlaps closestGap: remove the gap
|
||||
this._gaps.eraseElementByIterator(closestGapIt);
|
||||
nRemoved += 1;
|
||||
}
|
||||
}
|
||||
const nextGapIt = this._gaps.lowerBound(<GapSetEntry>{ firstKey: currentKey });
|
||||
if (!nextGapIt.isAccessible()) {
|
||||
// no more gap: we're done
|
||||
return nRemoved;
|
||||
}
|
||||
const nextGap = nextGapIt.pointer;
|
||||
// advance to the last key potentially overlapping with nextGap
|
||||
let currentKeyIt = overlappingKeysSet.reverseLowerBound(nextGap.lastKey);
|
||||
if (currentKeyIt.isAccessible()) {
|
||||
currentKey = currentKeyIt.pointer;
|
||||
if (currentKey >= nextGap.firstKey) {
|
||||
// currentKey overlaps nextGap: remove the gap
|
||||
this._gaps.eraseElementByIterator(nextGapIt);
|
||||
nRemoved += 1;
|
||||
}
|
||||
}
|
||||
// advance to the first key potentially overlapping with another gap
|
||||
currentKeyIt = overlappingKeysSet.lowerBound(nextGap.lastKey);
|
||||
currentKey = currentKeyIt.isAccessible() && currentKeyIt.pointer;
|
||||
}
|
||||
return nRemoved;
|
||||
}
|
||||
|
||||
/**
|
||||
* Internal helper to coalesce multiple chained gaps into a single gap.
|
||||
*
|
||||
* It is only used to construct lookupGap() return values and
|
||||
* doesn't modify the GapSet.
|
||||
*
|
||||
* NOTE: The function may take a noticeable amount of time and CPU
|
||||
* to execute if a large number of chained gaps have to be
|
||||
* coalesced, but it should never take more than a few seconds. In
|
||||
* most cases it should take less than a millisecond. It regularly
|
||||
* yields to the nodejs event loop to avoid blocking it during a
|
||||
* long execution.
|
||||
*
|
||||
* @param {GapSetEntry} firstGap - first gap of the chain to coalesce with
|
||||
* the next ones in the chain
|
||||
* @return {Promise<GapSetEntry>} - a new coalesced entry, as a Promise
|
||||
*/
|
||||
_coalesceGapChain(firstGap: GapSetEntry): Promise<GapSetEntry> {
|
||||
return new Promise(resolve => {
|
||||
const coalescedGap: GapSetEntry = Object.assign({}, firstGap);
|
||||
const coalesceGapChainIteration = () => {
|
||||
// efficiency trade-off: 100 iterations of log(N) complexity lookups should
|
||||
// not block the event loop for too long
|
||||
for (let opCounter = 0; opCounter < 100; ++opCounter) {
|
||||
const chainedGapIt = this._gaps.find(
|
||||
<GapSetEntry>{ firstKey: coalescedGap.lastKey });
|
||||
if (!chainedGapIt.isAccessible()) {
|
||||
// chain is complete
|
||||
return resolve(coalescedGap);
|
||||
}
|
||||
const chainedGap = chainedGapIt.pointer;
|
||||
if (chainedGap.firstKey === chainedGap.lastKey) {
|
||||
// found a single-key gap: chain is complete
|
||||
return resolve(coalescedGap);
|
||||
}
|
||||
coalescedGap.lastKey = chainedGap.lastKey;
|
||||
coalescedGap.weight += chainedGap.weight;
|
||||
}
|
||||
// yield to the event loop before continuing the process
|
||||
// of coalescing the gap chain
|
||||
return process.nextTick(coalesceGapChainIteration);
|
||||
};
|
||||
coalesceGapChainIteration();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Lookup the next gap that overlaps with [minKey, maxKey]. Internally chained
|
||||
* gaps are coalesced in the response into a single contiguous large gap.
|
||||
*
|
||||
* @param {string} minKey - minimum key overlapping with the returned gap
|
||||
* @param {string} [maxKey] - maximum key overlapping with the returned gap
|
||||
* @return {Promise<GapSetEntry | null>} - result of the lookup if a gap
|
||||
* was found, null otherwise, as a Promise
|
||||
*/
|
||||
async lookupGap(minKey: string, maxKey?: string): Promise<GapSetEntry | null> {
|
||||
let firstGap: GapSetEntry | null = null;
|
||||
const minGapIt = this._gaps.reverseLowerBound(<GapSetEntry>{ firstKey: minKey });
|
||||
const minGap = minGapIt.isAccessible() && minGapIt.pointer;
|
||||
if (minGap && minGap.lastKey >= minKey) {
|
||||
firstGap = minGap;
|
||||
} else {
|
||||
const maxGapIt = this._gaps.upperBound(<GapSetEntry>{ firstKey: minKey });
|
||||
const maxGap = maxGapIt.isAccessible() && maxGapIt.pointer;
|
||||
if (maxGap && (maxKey === undefined || maxGap.firstKey <= maxKey)) {
|
||||
firstGap = maxGap;
|
||||
}
|
||||
}
|
||||
if (!firstGap) {
|
||||
return null;
|
||||
}
|
||||
return this._coalesceGapChain(firstGap);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the maximum weight setting for individual gaps.
|
||||
*
|
||||
* @return {number} - maximum weight of individual gaps
|
||||
*/
|
||||
get maxWeight(): number {
|
||||
return this._maxWeight;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the maximum weight setting for individual gaps.
|
||||
*
|
||||
* @param {number} gapWeight - maximum weight of individual gaps
|
||||
*/
|
||||
set maxWeight(gapWeight: number) {
|
||||
this._maxWeight = gapWeight;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the number of gaps stored in this set.
|
||||
*
|
||||
* @return {number} - number of gaps stored in this set
|
||||
*/
|
||||
get size(): number {
|
||||
return this._gaps.size();
|
||||
}
|
||||
|
||||
/**
|
||||
* Iterate over each gap of the set, ordered by first key
|
||||
*
|
||||
* @return {Iterator<GapSetEntry>} - an iterator over all gaps
|
||||
* Example:
|
||||
* for (const gap of myGapSet) { ... }
|
||||
*/
|
||||
[Symbol.iterator](): Iterator<GapSetEntry> {
|
||||
return this._gaps[Symbol.iterator]();
|
||||
}
|
||||
|
||||
/**
|
||||
* Return an array containing all gaps, ordered by first key
|
||||
*
|
||||
* NOTE: there is a toArray() method in the OrderedSet implementation
|
||||
* but it does not scale well and overflows the stack quickly. This is
|
||||
* why we provide an implementation based on an iterator.
|
||||
*
|
||||
* @return {GapSetEntry[]} - an array containing all gaps
|
||||
*/
|
||||
toArray(): GapSetEntry[] {
|
||||
return [...this];
|
||||
}
|
||||
}
|
|
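A minimal usage sketch of the setGap()/lookupGap() flow above (illustrative only, not part of the changeset; the import path, key names and weights are assumptions):

import { GapSet } from './GapSet';

async function gapSetDemo(): Promise<void> {
    const gaps = new GapSet(100); // individual gaps may weigh up to 100
    // record a gap of weight 20, then extend it through its last key:
    // 20 + 30 <= 100, so setGap() merges both ranges into a single gap
    gaps.setGap('key-002', 'key-050', 20);
    gaps.setGap('key-050', 'key-090', 30);
    const gap = await gaps.lookupGap('key-010');
    // gap -> { firstKey: 'key-002', lastKey: 'key-090', weight: 50 }
    // a write landing inside the range must invalidate the overlapping gap
    gaps.removeOverlappingGaps(['key-060']);
}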
@@ -1,4 +1,4 @@
const assert = require('assert');
import assert from 'assert';

/**
 * @class

@@ -6,13 +6,19 @@ const assert = require('assert');
 * number of items and a Least Recently Used (LRU) strategy for
 * eviction.
 */
class LRUCache {
export default class LRUCache {
    _maxEntries;
    _entryMap;
    _entryCount;
    _lruTail;
    _lruHead;

    /**
     * @constructor
     * @param {number} maxEntries - maximum number of entries kept in
     * @param maxEntries - maximum number of entries kept in
     *   the cache
     */
    constructor(maxEntries) {
    constructor(maxEntries: number) {
        assert(maxEntries >= 1);
        this._maxEntries = maxEntries;
        this.clear();

@@ -22,12 +28,12 @@ class LRUCache {
     * Add or update the value associated to a key in the cache,
     * making it the most recently accessed for eviction purpose.
     *
     * @param {string} key - key to add
     * @param {object} value - associated value (can be of any type)
     * @return {boolean} true if the cache contained an entry with
     * @param key - key to add
     * @param value - associated value (can be of any type)
     * @return true if the cache contained an entry with
     *   this key, false if it did not
     */
    add(key, value) {
    add(key: string, value): boolean {
        let entry = this._entryMap[key];
        if (entry) {
            entry.value = value;

@@ -54,12 +60,12 @@ class LRUCache {
     * Get the value associated to a key in the cache, making it the
     * most recently accessed for eviction purpose.
     *
     * @param {string} key - key of which to fetch the associated value
     * @return {object|undefined} - returns the associated value if
     * @param key - key of which to fetch the associated value
     * @return returns the associated value if
     *   it exists in the cache, or undefined if not found - either if the
     *   key was never added or if it has been evicted from the cache.
     */
    get(key) {
    get(key: string) {
        const entry = this._entryMap[key];
        if (entry) {
            // make the entry the most recently used by re-pushing it

@@ -74,12 +80,12 @@ class LRUCache {
    /**
     * Remove an entry from the cache if it exists
     *
     * @param {string} key - key to remove
     * @return {boolean} true if an entry has been removed, false if
     * @param key - key to remove
     * @return true if an entry has been removed, false if
     *   there was no entry with this key in the cache - either if the
     *   key was never added or if it has been evicted from the cache.
     */
    remove(key) {
    remove(key: string): boolean {
        const entry = this._entryMap[key];
        if (entry) {
            this._removeEntry(entry);

@@ -91,16 +97,14 @@ class LRUCache {
    /**
     * Get the current number of cached entries
     *
     * @return {number} current number of cached entries
     * @return current number of cached entries
     */
    count() {
    count(): number {
        return this._entryCount;
    }

    /**
     * Remove all entries from the cache
     *
     * @return {undefined}
     */
    clear() {
        this._entryMap = {};

@@ -113,8 +117,7 @@ class LRUCache {
     * Push an entry to the front of the LRU list, making it the most
     * recently accessed
     *
     * @param {object} entry - entry to push
     * @return {undefined}
     * @param entry - entry to push
     */
    _lruPushEntry(entry) {
        /* eslint-disable no-param-reassign */

@@ -133,8 +136,7 @@ class LRUCache {
    /**
     * Remove an entry from the LRU list
     *
     * @param {object} entry - entry to remove
     * @return {undefined}
     * @param entry - entry to remove
     */
    _lruRemoveEntry(entry) {
        /* eslint-disable no-param-reassign */

@@ -154,8 +156,7 @@ class LRUCache {
    /**
     * Helper function to remove an existing entry from the cache
     *
     * @param {object} entry - cache entry to remove
     * @return {undefined}
     * @param entry - cache entry to remove
     */
    _removeEntry(entry) {
        this._lruRemoveEntry(entry);

@@ -163,5 +164,3 @@ class LRUCache {
        this._entryCount -= 1;
    }
}

module.exports = LRUCache;
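A small usage sketch of the LRU behavior documented above (illustrative only; the import path is an assumption):

import LRUCache from './LRUCache';

const cache = new LRUCache(2); // keep at most 2 entries
cache.add('a', 1);             // -> false: 'a' was not cached before
cache.add('b', 2);
cache.get('a');                // -> 1, and 'a' becomes most recently used
cache.add('c', 3);             // evicts 'b', the least recently used entry
cache.get('b');                // -> undefined: 'b' was evicted
cache.count();                 // -> 2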
@@ -1,124 +0,0 @@
export enum HeapOrder {
    Min = -1,
    Max = 1,
}

export enum CompareResult {
    LT = -1,
    EQ = 0,
    GT = 1,
}

export type CompareFunction = (x: any, y: any) => CompareResult;

export class Heap {
    size: number;
    _maxSize: number;
    _order: HeapOrder;
    _heap: any[];
    _cmpFn: CompareFunction;

    constructor(size: number, order: HeapOrder, cmpFn: CompareFunction) {
        this.size = 0;
        this._maxSize = size;
        this._order = order;
        this._cmpFn = cmpFn;
        this._heap = new Array<any>(this._maxSize);
    }

    _parent(i: number): number {
        return Math.floor((i - 1) / 2);
    }

    _left(i: number): number {
        return Math.floor((2 * i) + 1);
    }

    _right(i: number): number {
        return Math.floor((2 * i) + 2);
    }

    _shouldSwap(childIdx: number, parentIdx: number): boolean {
        return this._cmpFn(this._heap[childIdx], this._heap[parentIdx]) as number === this._order as number;
    }

    _swap(i: number, j: number) {
        const tmp = this._heap[i];
        this._heap[i] = this._heap[j];
        this._heap[j] = tmp;
    }

    _heapify(i: number) {
        const l = this._left(i);
        const r = this._right(i);
        let c = i;

        if (l < this.size && this._shouldSwap(l, c)) {
            c = l;
        }

        if (r < this.size && this._shouldSwap(r, c)) {
            c = r;
        }

        if (c != i) {
            this._swap(c, i);
            this._heapify(c);
        }
    }

    add(item: any): any {
        if (this.size >= this._maxSize) {
            return new Error('Max heap size reached');
        }

        ++this.size;
        let c = this.size - 1;
        this._heap[c] = item;

        while (c > 0) {
            if (!this._shouldSwap(c, this._parent(c))) {
                return null;
            }

            this._swap(c, this._parent(c));
            c = this._parent(c);
        }

        return null;
    };

    remove(): any {
        if (this.size <= 0) {
            return null;
        }

        const ret = this._heap[0];
        this._heap[0] = this._heap[this.size - 1];
        this._heapify(0);
        --this.size;

        return ret;
    };

    peek(): any {
        if (this.size <= 0) {
            return null;
        }

        return this._heap[0];
    };
}

export class MinHeap extends Heap {
    constructor(size: number, cmpFn: CompareFunction) {
        super(size, HeapOrder.Min, cmpFn);
    }
}

export class MaxHeap extends Heap {
    constructor(size: number, cmpFn: CompareFunction) {
        super(size, HeapOrder.Max, cmpFn);
    }
}
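For reference, a usage sketch of the Heap API removed above (illustrative only; the comparator must return a CompareResult, which _shouldSwap() compares against the heap order):

import { MinHeap, CompareResult, CompareFunction } from './Heap';

const cmp: CompareFunction = (x, y) =>
    x < y ? CompareResult.LT : (x > y ? CompareResult.GT : CompareResult.EQ);

const heap = new MinHeap(16, cmp); // capacity of 16 elements
heap.add(3);
heap.add(1);
heap.add(2);
heap.peek();   // -> 1: a min-heap keeps the smallest element at the root
heap.remove(); // -> 1, then the heap re-balances via _heapify()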
@@ -0,0 +1,10 @@
import * as DelimiterTools from './list/tools';
import { Skip } from './list/skip';
import LRUCache from './cache/LRUCache';
import MergeStream from './stream/MergeStream';

export * as list from './list/exportAlgos';
export { default as SortedSet } from './set/SortedSet';
export const listTools = { DelimiterTools, Skip };
export const cache = { LRUCache };
export const stream = { MergeStream };
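A consumer-side sketch of the re-exports above (illustrative only; assumes this file is the module's entry point):

import { cache, listTools } from './index';

const lru = new cache.LRUCache(10); // re-exported default of LRUCache
const { Skip } = listTools;         // listing helper re-exported above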
@@ -1,7 +1,4 @@
'use strict'; // eslint-disable-line strict

const { FILTER_ACCEPT, SKIP_NONE } = require('./tools');

import { FILTER_SKIP, SKIP_NONE } from './tools';
// Use a heuristic to amortize the cost of JSON
// serialization/deserialization only on largest metadata where the
// potential for size reduction is high, considering the bulk of the

@@ -22,7 +19,12 @@ const TRIM_METADATA_MIN_BLOB_SIZE = 10000;
/**
 * Base class of listing extensions.
 */
class Extension {
export default class Extension {
    parameters;
    logger;
    res?: any[];
    keys: number;

    /**
     * This takes a list of parameters and a logger as the inputs.
     * Derivatives should have their own format regarding parameters.

@@ -51,14 +53,14 @@ class Extension {
     * heavy unused fields, or left untouched (depending on size
     * heuristics)
     */
    trimMetadata(value) {
        let ret = undefined;
    trimMetadata(value: string): string {
        let ret: any = undefined;
        if (value.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
            try {
                ret = JSON.parse(value);
                delete ret.location;
                ret = JSON.stringify(ret);
            } catch (e) {
            } catch (e: any) {
                // Prefer returning an unfiltered data rather than
                // stopping the service in case of parsing failure.
                // The risk of this approach is a potential

@@ -66,7 +68,8 @@ class Extension {
                // used by repd.
                this.logger.warn(
                    'Could not parse Object Metadata while listing',
                    { err: e.toString() });
                    { err: e.toString() }
                );
            }
        }
        return ret || value;

@@ -92,38 +95,31 @@ class Extension {
     * @param {object} entry - a listing entry from metadata
     *                         expected format: { key, value }
     * @return {number} - result of filtering the entry:
     *   FILTER_ACCEPT: entry is accepted and may or may not be included
     *                  in the result
     *   FILTER_SKIP: listing may skip directly (with "gte" param) to
     *                the key returned by the skipping() method
     *   FILTER_END: the results are complete, listing can be stopped
     *   > 0: entry is accepted and included in the result
     *   = 0: entry is accepted but not included (skipping)
     *   < 0: entry is not accepted, listing should finish
     */
    filter(/* entry: { key, value } */) {
        return FILTER_ACCEPT;
    filter(entry): number {
        return entry ? FILTER_SKIP : FILTER_SKIP;
    }

    /**
     * Provides the next key at which the listing task is allowed to skip to.
     * This could allow to skip over:
     * - a key prefix ending with the delimiter
     * - all remaining versions of an object when doing a current
     *   versions listing in v0 format
     * - a cached "gap" of deleted objects when doing a current
     *   versions listing in v0 format
     * Provides the insight into why filter is skipping an entry. This could be
     * because it is skipping a range of delimited keys or a range of specific
     * version when doing master version listing.
     *
     * @return {string} - the next key at which the listing task is allowed to skip to
     * @return the insight: a common prefix or a master key,
     *   or SKIP_NONE if there is no insight
     */
    skipping() {
    skipping(): string | undefined {
        return SKIP_NONE;
    }

    /**
     * Get the listing results. Format depends on derivatives' specific logic.
     * @return {Array} - The listed elements
     * @return The listed elements
     */
    result() {
        return this.res;
    }
}

module.exports.default = Extension;
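A hypothetical minimal derivative (illustrative sketch, not part of the diff) showing how a listing extension plugs into the filter()/result() contract described above; the base class is assumed to expose `parameters`, `logger`, `res` and `keys` as declared in its field list:

import Extension from './Extension';
import { FILTER_ACCEPT, FILTER_END } from './tools';

// Collect at most maxKeys raw entries, with no skipping logic.
class TakeN extends Extension {
    maxKeys: number;

    constructor(parameters: { maxKeys?: number }, logger) {
        super(parameters, logger);
        this.maxKeys = parameters.maxKeys || 100;
        this.res = [];
        this.keys = 0; // assumed not initialized by the base class
    }

    filter(entry: { key: string; value: string }): number {
        if (this.keys >= this.maxKeys) {
            return FILTER_END; // enough entries: stop the listing
        }
        this.res!.push(entry);
        this.keys += 1;
        return FILTER_ACCEPT; // accept and include the entry
    }
}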
@@ -1,9 +1,12 @@
'use strict'; // eslint-disable-line strict

const { inc, checkLimit, listingParamsMasterKeysV0ToV1,
    FILTER_END, FILTER_ACCEPT, SKIP_NONE } = require('./tools');
import {
    inc,
    checkLimit,
    listingParamsMasterKeysV0ToV1,
    FILTER_END,
    FILTER_ACCEPT,
} from './tools';
const DEFAULT_MAX_KEYS = 1000;
const VSConst = require('../../versioning/constants').VersioningConstants;
import { VersioningConstants as VSConst } from '../../versioning/constants';
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

function numberDefault(num, defaultNum) {

@@ -14,7 +17,22 @@ function numberDefault(num, defaultNum) {
/**
 * Class for the MultipartUploads extension
 */
class MultipartUploads {
export class MultipartUploads {
    params
    vFormat
    CommonPrefixes
    Uploads
    IsTruncated
    NextKeyMarker
    NextUploadIdMarker
    prefixLength
    queryPrefixLength
    keys
    maxKeys
    delimiter
    splitter
    logger

    /**
     * Constructor of the extension
     * Init and check parameters

@@ -39,7 +57,9 @@ class MultipartUploads {
        this.splitter = params.splitter;
        this.logger = logger;

        Object.assign(this, {
        Object.assign(
            this,
            {
                [BucketVersioningKeyFormat.v0]: {
                    genMDParams: this.genMDParamsV0,
                    getObjectKey: this.getObjectKeyV0,

@@ -48,13 +68,15 @@ class MultipartUploads {
                    genMDParams: this.genMDParamsV1,
                    getObjectKey: this.getObjectKeyV1,
                },
        }[this.vFormat]);
            }[this.vFormat]
        );
    }

    genMDParamsV0() {
        const params = {};
        if (this.params.keyMarker) {
            params.gt = `overview${this.params.splitter}` +
            params.gt =
                `overview${this.params.splitter}` +
                `${this.params.keyMarker}${this.params.splitter}`;
            if (this.params.uploadIdMarker) {
                params.gt += `${this.params.uploadIdMarker}`;

@@ -147,14 +169,20 @@ class MultipartUploads {
        if (this.delimiter) {
            const mpuPrefixSlice = `overview${this.splitter}`.length;
            const mpuKey = key.slice(mpuPrefixSlice);
            const commonPrefixIndex = mpuKey.indexOf(this.delimiter,
                this.queryPrefixLength);
            const commonPrefixIndex = mpuKey.indexOf(
                this.delimiter,
                this.queryPrefixLength
            );

            if (commonPrefixIndex === -1) {
                this.addUpload(value);
            } else {
                this.addCommonPrefix(mpuKey.substring(0,
                    commonPrefixIndex + this.delimiter.length));
                this.addCommonPrefix(
                    mpuKey.substring(
                        0,
                        commonPrefixIndex + this.delimiter.length
                    )
                );
            }
        } else {
            this.addUpload(value);

@@ -163,7 +191,7 @@ class MultipartUploads {
    }

    skipping() {
        return SKIP_NONE;
        return '';
    }

    /**

@@ -182,7 +210,3 @@ class MultipartUploads {
        };
    }
}

module.exports = {
    MultipartUploads,
};
@@ -1,14 +1,17 @@
'use strict'; // eslint-disable-line strict
import Extension from './Extension';
import { checkLimit, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } from './tools';

const Extension = require('./Extension').default;

const { checkLimit, FILTER_END, FILTER_ACCEPT } = require('./tools');
const DEFAULT_MAX_KEYS = 10000;

/**
 * Class of an extension doing the simple listing
 */
class List extends Extension {
export class List extends Extension {
    maxKeys: number;
    filterKey;
    filterKeyStartsWith;
    res: any[];

    /**
     * Constructor
     * Set the logger and the res

@@ -30,15 +33,17 @@ class List extends Extension {
    }

    genMDParams() {
        const params = this.parameters ? {
        const params = this.parameters
            ? {
                gt: this.parameters.gt,
                gte: this.parameters.gte || this.parameters.start,
                lt: this.parameters.lt,
                lte: this.parameters.lte || this.parameters.end,
                keys: this.parameters.keys,
                values: this.parameters.values,
        } : {};
        Object.keys(params).forEach(key => {
            }
            : {};
        Object.keys(params).forEach((key) => {
            if (params[key] === null || params[key] === undefined) {
                delete params[key];
            }

@@ -53,29 +58,30 @@ class List extends Extension {
     *
     * @return {Boolean} Returns true if matches, else false.
     */
    customFilter(value) {
        let _value;
    customFilter(value: string): boolean {
        let _value: any;
        try {
            _value = JSON.parse(value);
        } catch (e) {
        } catch (e: any) {
            // Prefer returning an unfiltered data rather than
            // stopping the service in case of parsing failure.
            // The risk of this approach is a potential
            // reproduction of MD-692, where too much memory is
            // used by repd.
            this.logger.warn(
                'Could not parse Object Metadata while listing',
                { err: e.toString() });
            this.logger.warn('Could not parse Object Metadata while listing', {
                err: e.toString(),
            });
            return false;
        }
        if (_value.customAttributes !== undefined) {
            for (const key of Object.keys(_value.customAttributes)) {
                if (this.filterKey !== undefined &&
                    key === this.filterKey) {
                if (this.filterKey !== undefined && key === this.filterKey) {
                    return true;
                }
                if (this.filterKeyStartsWith !== undefined &&
                    key.startsWith(this.filterKeyStartsWith)) {
                if (
                    this.filterKeyStartsWith !== undefined &&
                    key.startsWith(this.filterKeyStartsWith)
                ) {
                    return true;
                }
            }

@@ -90,16 +96,18 @@ class List extends Extension {
     * @return {number} - > 0 : continue listing
     *                    < 0 : listing done
     */
    filter(elem) {
        // Check if the result array is full
    filter(elem): number {
        // Check first in case of maxKeys <= 0
        if (this.keys >= this.maxKeys) {
            return FILTER_END;
        }
        if ((this.filterKey !== undefined ||
        if (
            (this.filterKey !== undefined ||
                this.filterKeyStartsWith !== undefined) &&
            typeof elem === 'object' &&
            !this.customFilter(elem.value)) {
            return FILTER_ACCEPT;
            !this.customFilter(elem.value)
        ) {
            return FILTER_SKIP;
        }
        if (typeof elem === 'object') {
            this.res.push({

@@ -121,7 +129,3 @@ class List extends Extension {
        return this.res;
    }
}

module.exports = {
    List,
};
@@ -1,48 +1,29 @@
'use strict'; // eslint-disable-line strict

const Extension = require('./Extension').default;
const { inc, listingParamsMasterKeysV0ToV1,
    FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools');
const VSConst = require('../../versioning/constants').VersioningConstants;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

export interface FilterState {
    id: number,
};

export interface FilterReturnValue {
import Extension from './Extension';
import {
    inc,
    listingParamsMasterKeysV0ToV1,
    FILTER_END,
    FILTER_ACCEPT,
    FILTER_SKIP,
    FILTER_END,
};
} from './tools';
import { VersioningConstants as VSConst } from '../../versioning/constants';
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

export const enum DelimiterFilterStateId {
    NotSkipping = 1,
    SkippingPrefix = 2,
};

export interface DelimiterFilterState_NotSkipping extends FilterState {
    id: DelimiterFilterStateId.NotSkipping,
};

export interface DelimiterFilterState_SkippingPrefix extends FilterState {
    id: DelimiterFilterStateId.SkippingPrefix,
    prefix: string;
};

type KeyHandler = (key: string, value: string) => FilterReturnValue;

export type ResultObject = {
    CommonPrefixes: string[];
    Contents: {
        key: string;
        value: string;
    }[];
    IsTruncated: boolean;
    Delimiter ?: string;
    NextMarker ?: string;
    NextContinuationToken ?: string;
};
/**
 * Find the common prefix in the path
 *
 * @param {String} key - path of the object
 * @param {String} delimiter - separator
 * @param {Number} delimiterIndex - 'folder' index in the path
 * @return {String} - CommonPrefix
 */
function getCommonPrefix(
    key: string,
    delimiter: string,
    delimiterIndex: number
): string {
    return key.substring(0, delimiterIndex + delimiter.length);
}

/**
 * Handle object listing with parameters

@@ -57,9 +38,23 @@ export type ResultObject = {
 * @prop {Number} maxKeys - number of keys to list
 */
export class Delimiter extends Extension {

    state: FilterState;
    keyHandlers: { [id: number]: KeyHandler };
    CommonPrefixes: string[];
    Contents: string[];
    IsTruncated: boolean;
    NextMarker?: string;
    keys: number;
    delimiter?: string;
    prefix?: string;
    maxKeys: number;
    marker;
    startAfter;
    continuationToken;
    alphabeticalOrder;
    vFormat;
    NextContinuationToken;
    startMarker;
    continueMarker;
    nextContinueMarker;

    /**
     * Create a new Delimiter instance

@@ -78,6 +73,9 @@ export class Delimiter extends Extension {
     *                                  format
     * @param {String} [parameters.continuationToken] - obfuscated amazon
     *                                                  token
     * @param {Boolean} [parameters.alphabeticalOrder] - Either the result is
     *                                                   alphabetically ordered
     *                                                   or not
     * @param {RequestLogger} logger - The logger of the
     *                                 request
     * @param {String} [vFormat] - versioning key format

@@ -85,25 +83,51 @@ export class Delimiter extends Extension {
    constructor(parameters, logger, vFormat) {
        super(parameters, logger);
        // original listing parameters
        this.keys = 0;
        this.delimiter = parameters.delimiter;
        this.prefix = parameters.prefix;
        this.maxKeys = parameters.maxKeys || 1000;

        if (parameters.v2) {
            this.marker = parameters.continuationToken || parameters.startAfter;
        } else {
            this.marker = parameters.marker;
        }
        this.nextMarker = this.marker;
        this.maxKeys = parameters.maxKeys || 1000;
        this.startAfter = parameters.startAfter;
        this.continuationToken = parameters.continuationToken;
        this.alphabeticalOrder =
            typeof parameters.alphabeticalOrder !== 'undefined'
                ? parameters.alphabeticalOrder
                : true;

        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
        // results
        this.CommonPrefixes = [];
        this.Contents = [];
        this.IsTruncated = false;
        this.keyHandlers = {};
        this.NextMarker = parameters.marker;
        this.NextContinuationToken =
            parameters.continuationToken || parameters.startAfter;

        Object.assign(this, {
        this.startMarker = parameters.v2 ? 'startAfter' : 'marker';
        this.continueMarker = parameters.v2 ? 'continuationToken' : 'marker';
        this.nextContinueMarker = parameters.v2
            ? 'NextContinuationToken'
            : 'NextMarker';

        if (
            this.delimiter !== undefined &&
            this[this.nextContinueMarker] !== undefined &&
            this[this.nextContinueMarker].startsWith(this.prefix || '')
        ) {
            const nextDelimiterIndex = this[this.nextContinueMarker].indexOf(
                this.delimiter,
                this.prefix ? this.prefix.length : 0
            );
            this[this.nextContinueMarker] = this[this.nextContinueMarker].slice(
                0,
                nextDelimiterIndex + this.delimiter.length
            );
        }

        Object.assign(
            this,
            {
                [BucketVersioningKeyFormat.v0]: {
                    genMDParams: this.genMDParamsV0,
                    getObjectKey: this.getObjectKeyV0,

@@ -114,50 +138,23 @@ export class Delimiter extends Extension {
                    getObjectKey: this.getObjectKeyV1,
                    skipping: this.skippingV1,
                },
        }[this.vFormat]);

        // if there is a delimiter, we may skip ranges by prefix,
        // hence using the NotSkippingPrefix flavor that checks the
        // subprefix up to the delimiter for the NotSkipping state
        if (this.delimiter) {
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingPrefix.bind(this));
        } else {
            // listing without a delimiter never has to skip over any
            // prefix -> use NeverSkipping flavor for the NotSkipping
            // state
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NeverSkipping.bind(this));
        }
        this.setKeyHandler(
            DelimiterFilterStateId.SkippingPrefix,
            this.keyHandler_SkippingPrefix.bind(this));

        this.state = <DelimiterFilterState_NotSkipping> {
            id: DelimiterFilterStateId.NotSkipping,
        };
            }[this.vFormat]
        );
    }

    genMDParamsV0() {
        const params: { gt ?: string, gte ?: string, lt ?: string } = {};
        const params: { gte?: string; lt?: string; gt?: string } = {};
        if (this.prefix) {
            params.gte = this.prefix;
            params.lt = inc(this.prefix);
        }
        if (this.marker && this.delimiter) {
            const commonPrefix = this.getCommonPrefix(this.marker);
            if (commonPrefix) {
                const afterPrefix = inc(commonPrefix);
                if (!params.gte || afterPrefix > params.gte) {
                    params.gte = afterPrefix;
        const startVal = this[this.continueMarker] || this[this.startMarker];
        if (startVal) {
            if (params.gte && params.gte > startVal) {
                return params;
            }
        }
        if (this.marker && (!params.gte || this.marker >= params.gte)) {
            delete params.gte;
            params.gt = this.marker;
            params.gt = startVal;
        }
        return params;
    }

@@ -189,53 +186,21 @@ export class Delimiter extends Extension {
     * @param {String} value - The value of the key
     * @return {number} - indicates if iteration should continue
     */
    addContents(key: string, value: string): void {
    addContents(key: string, value: string): number {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        this.Contents.push({ key, value: this.trimMetadata(value) });
        this[this.nextContinueMarker] = key;
        ++this.keys;
        this.nextMarker = key;
        return FILTER_ACCEPT;
    }

    getCommonPrefix(key: string): string | undefined {
        if (!this.delimiter) {
            return undefined;
        }
        const baseIndex = this.prefix ? this.prefix.length : 0;
        const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
        if (delimiterIndex === -1) {
            return undefined;
        }
        return key.substring(0, delimiterIndex + this.delimiter.length);
    }

    /**
     * Add a Common Prefix in the list
     * @param {String} commonPrefix - common prefix to add
     * @param {String} key - full key starting with commonPrefix
     * @return {Boolean} - indicates if iteration should continue
     */
    addCommonPrefix(commonPrefix: string, key: string): void {
        // add the new prefix to the list
        this.CommonPrefixes.push(commonPrefix);
        ++this.keys;
        this.nextMarker = commonPrefix;
    }

    addCommonPrefixOrContents(key: string, value: string): string | undefined {
        // add the subprefix to the common prefixes if the key has the delimiter
        const commonPrefix = this.getCommonPrefix(key);
        if (commonPrefix) {
            this.addCommonPrefix(commonPrefix, key);
            return commonPrefix;
        }
        this.addContents(key, value);
        return undefined;
    }

    getObjectKeyV0(obj: { key: string }): string {
    getObjectKeyV0(obj: { key: string }) {
        return obj.key;
    }

    getObjectKeyV1(obj: { key: string }): string {
    getObjectKeyV1(obj: { key: string }) {
        return obj.key.slice(DbPrefixes.Master.length);
    }

@@ -250,81 +215,71 @@ export class Delimiter extends Extension {
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filter(obj: { key: string, value: string }): FilterReturnValue {
    filter(obj: { key: string; value: string }): number {
        const key = this.getObjectKey(obj);
        const value = obj.value;

        return this.handleKey(key, value);
    }

    setState(state: FilterState): void {
        this.state = state;
    }

    setKeyHandler(stateId: number, keyHandler: KeyHandler): void {
        this.keyHandlers[stateId] = keyHandler;
    }

    handleKey(key: string, value: string): FilterReturnValue {
        return this.keyHandlers[this.state.id](key, value);
    }

    keyHandler_NeverSkipping(key: string, value: string): FilterReturnValue {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        this.addContents(key, value);
        return FILTER_ACCEPT;
    }

    keyHandler_NotSkippingPrefix(key: string, value: string): FilterReturnValue {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        const commonPrefix = this.addCommonPrefixOrContents(key, value);
        if (commonPrefix) {
            // transition into SkippingPrefix state to skip all following keys
            // while they start with the same prefix
            this.setState(<DelimiterFilterState_SkippingPrefix> {
                id: DelimiterFilterStateId.SkippingPrefix,
                prefix: commonPrefix,
            });
        }
        return FILTER_ACCEPT;
    }

    keyHandler_SkippingPrefix(key: string, value: string): FilterReturnValue {
        const { prefix } = <DelimiterFilterState_SkippingPrefix> this.state;
        if (key.startsWith(prefix)) {
        if (
            (this.prefix && !key.startsWith(this.prefix)) ||
            (this.alphabeticalOrder &&
                typeof this[this.nextContinueMarker] === 'string' &&
                key <= this[this.nextContinueMarker])
        ) {
            return FILTER_SKIP;
        }
        this.setState(<DelimiterFilterState_NotSkipping> {
            id: DelimiterFilterStateId.NotSkipping,
        });
        return this.handleKey(key, value);
        if (this.delimiter) {
            const baseIndex = this.prefix ? this.prefix.length : 0;
            const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
            if (delimiterIndex === -1) {
                return this.addContents(key, value);
            }
            return this.addCommonPrefix(key, delimiterIndex);
        }
        return this.addContents(key, value);
    }

    skippingBase(): string | undefined {
        switch (this.state.id) {
        case DelimiterFilterStateId.SkippingPrefix:
            const { prefix } = <DelimiterFilterState_SkippingPrefix> this.state;
            return inc(prefix);

        default:
            return SKIP_NONE;
    /**
     * Add a Common Prefix in the list
     * @param {String} key - object name
     * @param {Number} index - after prefix starting point
     * @return {Boolean} - indicates if iteration should continue
     */
    addCommonPrefix(key: string, index: number): boolean {
        const commonPrefix = getCommonPrefix(key, this.delimiter, index);
        if (
            this.CommonPrefixes.indexOf(commonPrefix) === -1 &&
            this[this.nextContinueMarker] !== commonPrefix
        ) {
            if (this._reachedMaxKeys()) {
                return FILTER_END;
            }
            this.CommonPrefixes.push(commonPrefix);
            this[this.nextContinueMarker] = commonPrefix;
            ++this.keys;
            return FILTER_ACCEPT;
        }
        return FILTER_SKIP;
    }

    skippingV0() {
        return this.skippingBase();
    /**
     * If repd happens to want to skip listing on a bucket in v0
     * versioning key format, here is an idea.
     *
     * @return {string} - the present range (NextMarker) if repd believes
     *                    that it's enough and should move on
     */
    skippingV0(): string {
        return this[this.nextContinueMarker];
    }

    skippingV1() {
        const skipTo = this.skippingBase();
        if (skipTo === SKIP_NONE) {
            return SKIP_NONE;
        }
        return DbPrefixes.Master + skipTo;
    /**
     * If repd happens to want to skip listing on a bucket in v1
     * versioning key format, here is an idea.
     *
     * @return {string} - the present range (NextMarker) if repd believes
     *                    that it's enough and should move on
     */
    skippingV1(): string {
        return DbPrefixes.Master + this[this.nextContinueMarker];
    }

    /**

@@ -333,23 +288,28 @@ export class Delimiter extends Extension {
     *                    isn't truncated
     * @return {Object} - following amazon format
     */
    result(): ResultObject {
    result() {
        /* NextMarker is only provided when delimiter is used.
         * specified in v1 listing documentation
         * http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
         */
        const result: ResultObject = {
        const result = {
            CommonPrefixes: this.CommonPrefixes,
            Contents: this.Contents,
            IsTruncated: this.IsTruncated,
            Delimiter: this.delimiter,
        };
        if (this.parameters.v2) {
            //
            result.NextContinuationToken = this.IsTruncated
                ? this.nextMarker : undefined;
                ? this.NextContinuationToken
                : undefined;
        } else {
            result.NextMarker = (this.IsTruncated && this.delimiter)
                ? this.nextMarker : undefined;
            //
            result.NextMarker =
                this.IsTruncated && this.delimiter
                    ? this.NextMarker
                    : undefined;
        }
        return result;
    }
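To make the delimiter split above concrete, a worked sketch (illustrative only; the logger stub is hypothetical and the class is instantiated in its default v0 key format):

import { Delimiter } from './delimiter';

const logger = { warn: () => {}, info: () => {} }; // minimal stub

const listing = new Delimiter({ delimiter: '/', prefix: 'photos/' }, logger);
listing.filter({ key: 'photos/january/img1.jpg', value: '{}' });
// delimiter found after the prefix: 'photos/january/' goes to CommonPrefixes
listing.filter({ key: 'photos/readme.txt', value: '{}' });
// no delimiter after the prefix: the entry goes to Contents
const res = listing.result();
// res.CommonPrefixes -> ['photos/january/']; res.Contents -> one entry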
@@ -1,127 +0,0 @@
const { DelimiterMaster } = require('./delimiterMaster');
const { FILTER_ACCEPT, FILTER_END } = require('./tools');

type ResultObject = {
    Contents: {
        key: string;
        value: string;
    }[];
    IsTruncated: boolean;
    NextMarker ?: string;
};

/**
 * Handle object listing with parameters. This extends the base class DelimiterMaster
 * to return the master/current versions.
 */
class DelimiterCurrent extends DelimiterMaster {
    /**
     * Delimiter listing of current versions.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.beforeDate - limit the response to keys older than beforeDate
     * @param {String} parameters.excludedDataStoreName - excluded datastore name
     * @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger, vFormat);

        this.beforeDate = parameters.beforeDate;
        this.excludedDataStoreName = parameters.excludedDataStoreName;
        this.maxScannedLifecycleListingEntries = parameters.maxScannedLifecycleListingEntries;
        this.scannedKeys = 0;
    }

    genMDParamsV0() {
        const params = super.genMDParamsV0();
        // lastModified and dataStoreName parameters are used by metadata that enables built-in filtering,
        // a feature currently exclusive to MongoDB
        if (this.beforeDate) {
            params.lastModified = {
                lt: this.beforeDate,
            };
        }

        if (this.excludedDataStoreName) {
            params.dataStoreName = {
                ne: this.excludedDataStoreName,
            };
        }

        return params;
    }

    /**
     * Parses the stringified entry's value.
     * @param s - stringified value
     * @return - undefined if parsing fails, otherwise it contains the parsed value.
     */
    _parse(s) {
        let p;
        try {
            p = JSON.parse(s);
        } catch (e: any) {
            this.logger.warn(
                'Could not parse Object Metadata while listing',
                { err: e.toString() });
        }
        return p;
    }

    /**
     * check if the max keys count has been reached and set the
     * final state of the result if it is the case
     *
     * specialized implementation on DelimiterCurrent to also check
     * the number of scanned keys
     *
     * @return {Boolean} - indicates if the iteration has to stop
     */
    _reachedMaxKeys(): boolean {
        if (this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries) {
            this.IsTruncated = true;
            this.logger.info('listing stopped due to reaching the maximum scanned entries limit',
                {
                    maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
                    scannedKeys: this.scannedKeys,
                });
            return true;
        }
        return super._reachedMaxKeys();
    }

    addContents(key, value) {
        ++this.scannedKeys;
        const parsedValue = this._parse(value);
        // if parsing fails, skip the key.
        if (parsedValue) {
            const lastModified = parsedValue['last-modified'];
            const dataStoreName = parsedValue.dataStoreName;
            // We then check if the current version is older than the "beforeDate" and
            // "excludedDataStoreName" is not specified, or if specified, that the data store name is different.
            if ((!this.beforeDate || (lastModified && lastModified < this.beforeDate)) &&
                (!this.excludedDataStoreName || dataStoreName !== this.excludedDataStoreName)) {
                super.addContents(key, value);
            }
            // In the event of a timeout occurring before any content is added,
            // NextMarker is updated even if the object is not eligible.
            // It minimizes the amount of data that the client needs to re-process if the request times out.
            this.nextMarker = key;
        }
    }

    result(): object {
        const result: ResultObject = {
            Contents: this.Contents,
            IsTruncated: this.IsTruncated,
        };

        if (this.IsTruncated) {
            result.NextMarker = this.nextMarker;
        }

        return result;
    }
}
module.exports = { DelimiterCurrent };
@ -1,620 +1,205 @@
|
|||
import {
|
||||
Delimiter,
|
||||
FilterState,
|
||||
FilterReturnValue,
|
||||
DelimiterFilterStateId,
|
||||
DelimiterFilterState_NotSkipping,
|
||||
DelimiterFilterState_SkippingPrefix,
|
||||
ResultObject,
|
||||
} from './delimiter';
|
||||
const Version = require('../../versioning/Version').Version;
|
||||
const VSConst = require('../../versioning/constants').VersioningConstants;
|
||||
import { Delimiter } from './delimiter';
|
||||
import { Version } from '../../versioning/Version';
|
||||
import { VersioningConstants as VSConst } from '../../versioning/constants';
|
||||
const { BucketVersioningKeyFormat } = VSConst;
|
||||
const { FILTER_ACCEPT, FILTER_SKIP, FILTER_END, SKIP_NONE, inc } = require('./tools');
|
||||
|
||||
import { GapSetEntry } from '../cache/GapSet';
|
||||
import { GapCacheInterface } from '../cache/GapCache';
|
||||
import { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } from './tools';
|
||||
|
||||
const VID_SEP = VSConst.VersionId.Separator;
|
||||
const { DbPrefixes } = VSConst;
|
||||
|
||||
export const enum DelimiterMasterFilterStateId {
|
||||
SkippingVersionsV0 = 101,
|
||||
WaitVersionAfterPHDV0 = 102,
|
||||
SkippingGapV0 = 103,
|
||||
};
|
||||
|
||||
interface DelimiterMasterFilterState_SkippingVersionsV0 extends FilterState {
|
||||
id: DelimiterMasterFilterStateId.SkippingVersionsV0,
|
||||
masterKey: string,
|
||||
};
|
||||
|
||||
interface DelimiterMasterFilterState_WaitVersionAfterPHDV0 extends FilterState {
|
||||
id: DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
|
||||
masterKey: string,
|
||||
};
|
||||
|
||||
interface DelimiterMasterFilterState_SkippingGapV0 extends FilterState {
|
||||
id: DelimiterMasterFilterStateId.SkippingGapV0,
|
||||
};
|
||||
|
||||
export const enum GapCachingState {
|
||||
NoGapCache = 0, // there is no gap cache
|
||||
UnknownGap = 1, // waiting for a cache lookup
|
||||
GapLookupInProgress = 2, // asynchronous gap lookup in progress
|
||||
GapCached = 3, // an upcoming or already skippable gap is cached
|
||||
NoMoreGap = 4, // the cache doesn't have any more gaps inside the listed range
|
||||
};
|
||||
|
||||
type GapCachingInfo_NoGapCache = {
|
||||
state: GapCachingState.NoGapCache;
|
||||
};
|
||||
|
||||
type GapCachingInfo_NoCachedGap = {
|
||||
state: GapCachingState.UnknownGap
|
||||
| GapCachingState.GapLookupInProgress
|
||||
gapCache: GapCacheInterface;
|
||||
};
|
||||
|
||||
type GapCachingInfo_GapCached = {
|
||||
state: GapCachingState.GapCached;
|
||||
gapCache: GapCacheInterface;
|
||||
gapCached: GapSetEntry;
|
||||
};
|
||||
|
||||
type GapCachingInfo_NoMoreGap = {
|
||||
state: GapCachingState.NoMoreGap;
|
||||
};
|
||||
|
||||
type GapCachingInfo = GapCachingInfo_NoGapCache
|
||||
| GapCachingInfo_NoCachedGap
|
||||
| GapCachingInfo_GapCached
|
||||
| GapCachingInfo_NoMoreGap;
|
||||
|
||||
|
||||
export const enum GapBuildingState {
|
||||
Disabled = 0, // no gap cache or no gap building needed (e.g. in V1 versioning format)
|
||||
NotBuilding = 1, // not currently building a gap (i.e. not listing within a gap)
|
||||
Building = 2, // currently building a gap (i.e. listing within a gap)
|
||||
Expired = 3, // not allowed to build due to exposure delay timeout
|
||||
};
|
||||
|
||||
type GapBuildingInfo_NothingToBuild = {
|
||||
state: GapBuildingState.Disabled | GapBuildingState.Expired;
|
||||
};
|
||||
|
||||
type GapBuildingParams = {
|
||||
/**
|
||||
* minimum weight for a gap to be created in the cache
|
||||
*/
|
||||
minGapWeight: number;
|
||||
/**
|
||||
* trigger a cache setGap() call every N skippable keys
|
||||
*/
|
||||
triggerSaveGapWeight: number;
|
||||
/**
|
||||
* timestamp to assess whether we're still inside the validity period to
|
||||
* be allowed to build gaps
|
||||
*/
|
||||
initTimestamp: number;
|
||||
};
|
||||
|
||||
type GapBuildingInfo_NotBuilding = {
|
||||
state: GapBuildingState.NotBuilding;
|
||||
gapCache: GapCacheInterface;
|
||||
params: GapBuildingParams;
|
||||
};
|
||||
|
||||
type GapBuildingInfo_Building = {
|
||||
state: GapBuildingState.Building;
|
||||
gapCache: GapCacheInterface;
|
||||
params: GapBuildingParams;
|
||||
/**
|
||||
* Gap currently being created
|
||||
*/
|
||||
gap: GapSetEntry;
|
||||
/**
|
||||
* total current weight of the gap being created
|
||||
*/
|
||||
gapWeight: number;
|
||||
};
|
||||
|
||||
type GapBuildingInfo = GapBuildingInfo_NothingToBuild
|
||||
| GapBuildingInfo_NotBuilding
|
||||
| GapBuildingInfo_Building;
|
||||
|
||||
/**
|
||||
* Handle object listing with parameters. This extends the base class Delimiter
|
||||
* to return the raw master versions of existing objects.
|
||||
*/
|
||||
export class DelimiterMaster extends Delimiter {
|
||||
|
||||
_gapCaching: GapCachingInfo;
|
||||
_gapBuilding: GapBuildingInfo;
|
||||
_refreshedBuildingParams: GapBuildingParams | null;
|
||||
prvKey;
|
||||
prvPHDKey;
|
||||
inReplayPrefix;
|
||||
|
||||
/**
|
||||
* Delimiter listing of master versions.
|
||||
* @param {Object} parameters - listing parameters
|
||||
* @param {String} [parameters.delimiter] - delimiter per amazon format
|
||||
* @param {String} [parameters.prefix] - prefix per amazon format
|
||||
* @param {String} [parameters.marker] - marker per amazon format
|
||||
* @param {Number} [parameters.maxKeys] - number of keys to list
|
||||
* @param {Boolean} [parameters.v2] - indicates whether v2 format
|
||||
* @param {String} [parameters.startAfter] - marker per amazon v2 format
|
||||
* @param {String} [parameters.continuationToken] - obfuscated amazon token
|
||||
* @param {String} parameters.delimiter - delimiter per amazon format
|
||||
* @param {String} parameters.prefix - prefix per amazon format
|
||||
* @param {String} parameters.marker - marker per amazon format
|
||||
* @param {Number} parameters.maxKeys - number of keys to list
|
||||
* @param {Boolean} parameters.v2 - indicates whether v2 format
|
||||
* @param {String} parameters.startAfter - marker per amazon v2 format
|
||||
* @param {String} parameters.continuationToken - obfuscated amazon token
|
||||
* @param {RequestLogger} logger - The logger of the request
|
||||
* @param {String} [vFormat="v0"] - versioning key format
|
||||
* @param {String} [vFormat] - versioning key format
|
||||
*/
|
||||
constructor(parameters, logger, vFormat?: string) {
constructor(parameters, logger, vFormat) {
    super(parameters, logger, vFormat);
    // non-PHD master version or a version whose master is a PHD version
    this.prvKey = undefined;
    this.prvPHDKey = undefined;
    this.inReplayPrefix = false;

    if (this.vFormat === BucketVersioningKeyFormat.v0) {
        // override Delimiter's implementation of NotSkipping for
        // DelimiterMaster logic (skipping versions and special
        // handling of delete markers and PHDs)
        this.setKeyHandler(
            DelimiterFilterStateId.NotSkipping,
            this.keyHandler_NotSkippingPrefixNorVersionsV0.bind(this));

        // add extra state handlers specific to DelimiterMaster with v0 format
        this.setKeyHandler(
            DelimiterMasterFilterStateId.SkippingVersionsV0,
            this.keyHandler_SkippingVersionsV0.bind(this));

        this.setKeyHandler(
            DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
            this.keyHandler_WaitVersionAfterPHDV0.bind(this));

        this.setKeyHandler(
            DelimiterMasterFilterStateId.SkippingGapV0,
            this.keyHandler_SkippingGapV0.bind(this));

        if (this.marker) {
            // distinct initial state to include some special logic
            // before the first master key is found that does not have
            // to be checked afterwards
            this.state = <DelimiterMasterFilterState_SkippingVersionsV0> {
                id: DelimiterMasterFilterStateId.SkippingVersionsV0,
                masterKey: this.marker,
            };
        } else {
            this.state = <DelimiterFilterState_NotSkipping> {
                id: DelimiterFilterStateId.NotSkipping,
            };
        }
    } else {
        // save base implementation of the `NotSkipping` state in
        // Delimiter before overriding it with ours, to be able to call it from there
        this.keyHandler_NotSkipping_Delimiter = this.keyHandlers[DelimiterFilterStateId.NotSkipping];
        this.setKeyHandler(
            DelimiterFilterStateId.NotSkipping,
            this.keyHandler_NotSkippingPrefixNorVersionsV1.bind(this));
    }
    // in v1, we can directly use Delimiter's implementation,
    // which is already set to the proper state

    // default initialization of the gap cache and building states, can be
    // set by refreshGapCache()
    this._gapCaching = {
        state: GapCachingState.NoGapCache,
    };
    this._gapBuilding = {
        state: GapBuildingState.Disabled,
    };
    this._refreshedBuildingParams = null;
    Object.assign(
        this,
        {
            [BucketVersioningKeyFormat.v0]: {
                filter: this.filterV0,
                skipping: this.skippingV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                filter: this.filterV1,
                skipping: this.skippingV1,
            },
        }[this.vFormat]
    );
}
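// ---------------------------------------------------------------------------
// Aside: the constructor above registers one key handler per filtering state
// and picks an initial state, so each filtered key reduces to a dispatch on
// the current state id. A minimal standalone sketch of that pattern (the
// names and the single-state setup are illustrative, not the module's real
// types):
type SketchKeyHandler = (key: string, value: string) => number;

class SketchStateMachine {
    state = { id: 1 };
    keyHandlers: { [id: number]: SketchKeyHandler } = {};

    setKeyHandler(id: number, handler: SketchKeyHandler): void {
        this.keyHandlers[id] = handler;
    }

    handleKey(key: string, value: string): number {
        // dispatch to the handler registered for the current state,
        // as handleKey() does in the listing classes
        return this.keyHandlers[this.state.id](key, value);
    }
}
// ---------------------------------------------------------------------------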
/**
 * Get the validity period left before a refresh of the gap cache is needed
 * to continue building new gaps.
 *
 * @return {number|null} one of:
 * - the remaining time in milliseconds in which gaps can be added to the
 *   cache before a call to refreshGapCache() is required
 * - or 0 if there is no time left and a call to refreshGapCache() is required
 *   to resume caching gaps
 * - or null if refreshing the cache is never needed (because the gap cache
 *   is either not available or not used)
 * Filter to apply on each iteration for buckets in v0 format,
 * based on:
 * - prefix
 * - delimiter
 * - maxKeys
 * The marker is being handled directly by levelDB
 * @param {Object} obj - The key and value of the element
 * @param {String} obj.key - The key of the element
 * @param {String} obj.value - The value of the element
 * @return {number} - indicates if iteration should continue
 */
getGapBuildingValidityPeriodMs(): number | null {
    let gapBuilding;
    switch (this._gapBuilding.state) {
    case GapBuildingState.Disabled:
        return null;
    case GapBuildingState.Expired:
        return 0;
    case GapBuildingState.NotBuilding:
        gapBuilding = <GapBuildingInfo_NotBuilding> this._gapBuilding;
        break;
    case GapBuildingState.Building:
        gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
        break;
    }
    const { gapCache, params } = gapBuilding;
    const elapsedTime = Date.now() - params.initTimestamp;
    return Math.max(gapCache.exposureDelayMs - elapsedTime, 0);
}

/**
 * Refresh the gaps caching logic (gaps are series of current delete markers
 * in V0 bucket metadata format). It has two effects:
 *
 * - starts exposing existing and future gaps from the cache to efficiently
 *   skip over series of current delete markers that have been seen and cached
 *   earlier
 *
 * - enables building and caching new gaps (or extending existing ones), for a
 *   limited time period defined by the `gapCacheProxy.exposureDelayMs` value
 *   in milliseconds. To refresh the validity period and resume building and
 *   caching new gaps, one must restart a new listing from the database (starting
 *   at the current listing key, included), then call refreshGapCache() again.
 *
 * @param {GapCacheInterface} gapCacheProxy - API proxy to the gaps cache
 * (the proxy should handle prefixing object keys with the bucket name)
 * @param {number} [minGapWeight=100] - minimum weight of a gap for it to be
 * added in the cache
 * @param {number} [triggerSaveGapWeight] - cumulative weight to wait for
 * before saving the current building gap. Cannot be greater than
 * `gapCacheProxy.maxGapWeight` (the value is thresholded to `maxGapWeight`
 * otherwise). Defaults to `gapCacheProxy.maxGapWeight / 2`.
 * @return {undefined}
 */
refreshGapCache(
    gapCacheProxy: GapCacheInterface,
    minGapWeight?: number,
    triggerSaveGapWeight?: number
): void {
    if (this.vFormat !== BucketVersioningKeyFormat.v0) {
        return;
    }
    if (this._gapCaching.state === GapCachingState.NoGapCache) {
        this._gapCaching = {
            state: GapCachingState.UnknownGap,
            gapCache: gapCacheProxy,
        };
    }
    const refreshedBuildingParams: GapBuildingParams = {
        minGapWeight: minGapWeight || 100,
        triggerSaveGapWeight: triggerSaveGapWeight
            || Math.trunc(gapCacheProxy.maxGapWeight / 2),
        initTimestamp: Date.now(),
    };
    if (this._gapBuilding.state === GapBuildingState.Building) {
        // refreshed params will be applied as soon as the current building gap is saved
        this._refreshedBuildingParams = refreshedBuildingParams;
    } else {
        this._gapBuilding = {
            state: GapBuildingState.NotBuilding,
            gapCache: gapCacheProxy,
            params: refreshedBuildingParams,
        };
    }
}
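// ---------------------------------------------------------------------------
// Aside: a sketch of how a caller might drive the refresh cycle described in
// the docstring above. `listOnePage` stands in for the caller's own listing
// loop (a hypothetical helper, not part of this module); only
// refreshGapCache() and getGapBuildingValidityPeriodMs() come from this class.
async function sketchListWithGapCache(
    listing: DelimiterMaster,
    gapCacheProxy: GapCacheInterface,
    listOnePage: (listing: DelimiterMaster) => Promise<boolean>,
): Promise<void> {
    // enable gap exposure and gap building for `exposureDelayMs`
    listing.refreshGapCache(gapCacheProxy);
    let morePages = true;
    while (morePages) {
        morePages = await listOnePage(listing);
        if (listing.getGapBuildingValidityPeriodMs() === 0) {
            // validity window elapsed: restart from the current key
            // and refresh to resume building new gaps
            listing.refreshGapCache(gapCacheProxy);
        }
    }
}
// ---------------------------------------------------------------------------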
/**
 * Trigger a lookup of the closest upcoming or already skippable gap.
 *
 * @param {GapCachingInfo_NoCachedGap} gapCaching - current gap caching info
 * @param {string} fromKey - lookup a gap not before 'fromKey'
 * @return {undefined} - the lookup is asynchronous and its
 * response is handled inside this function
 */
_triggerGapLookup(gapCaching: GapCachingInfo_NoCachedGap, fromKey: string): void {
    this._gapCaching = {
        state: GapCachingState.GapLookupInProgress,
        gapCache: gapCaching.gapCache,
    };
    const maxKey = this.prefix ? inc(this.prefix) : undefined;
    gapCaching.gapCache.lookupGap(fromKey, maxKey).then(_gap => {
        const gap = <GapSetEntry | null> _gap;
        if (gap) {
            this._gapCaching = {
                state: GapCachingState.GapCached,
                gapCache: gapCaching.gapCache,
                gapCached: gap,
            };
        } else {
            this._gapCaching = {
                state: GapCachingState.NoMoreGap,
            };
        }
    });
}
_checkGapOnMasterDeleteMarker(key: string): FilterReturnValue {
    switch (this._gapBuilding.state) {
    case GapBuildingState.Disabled:
    case GapBuildingState.Expired:
        break;
    case GapBuildingState.NotBuilding:
        this._createBuildingGap(key, 1);
        break;
    case GapBuildingState.Building:
        this._updateBuildingGap(key);
        break;
    }
    if (this._gapCaching.state === GapCachingState.GapCached) {
        const { gapCached } = this._gapCaching;
        if (key >= gapCached.firstKey) {
            if (key <= gapCached.lastKey) {
                // we are inside the last looked up cached gap: transition to
                // 'SkippingGapV0' state
                this.setState(<DelimiterMasterFilterState_SkippingGapV0> {
                    id: DelimiterMasterFilterStateId.SkippingGapV0,
                });
                // cut the current gap before skipping, it will be merged or
                // chained with the existing one (depending on its weight)
                if (this._gapBuilding.state === GapBuildingState.Building) {
                    // subtract 1 from the weight because we are going to chain this gap,
                    // which has an overlap of one key.
                    this._gapBuilding.gap.weight -= 1;
                    this._cutBuildingGap();
                }
                return FILTER_SKIP;
            }
            // as we are past the cached gap, we will need another lookup
            this._gapCaching = {
                state: GapCachingState.UnknownGap,
                gapCache: this._gapCaching.gapCache,
            };
        }
    }
    if (this._gapCaching.state === GapCachingState.UnknownGap) {
        this._triggerGapLookup(this._gapCaching, key);
    }
    return FILTER_ACCEPT;
}
filter_onNewMasterKeyV0(key: string, value: string): FilterReturnValue {
    // if this master key is a delete marker, accept it without
    // adding the version to the contents
    if (Version.isDeleteMarker(value)) {
        // update the state to start skipping versions of the new master key
        this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
            id: DelimiterMasterFilterStateId.SkippingVersionsV0,
            masterKey: key,
        });
        return this._checkGapOnMasterDeleteMarker(key);
    }
    if (Version.isPHD(value)) {
        // master version is a PHD version: wait for the first
        // following version that will be considered as the actual
        // master key
        this.setState(<DelimiterMasterFilterState_WaitVersionAfterPHDV0> {
            id: DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
            masterKey: key,
        });
        return FILTER_ACCEPT;
    }
    // cut the current gap as soon as a non-deleted entry is seen
    this._cutBuildingGap();
filterV0(obj: { key: string; value: string }): number {
    let key = obj.key;
    const value = obj.value;

    if (key.startsWith(DbPrefixes.Replay)) {
        // skip internal replay prefix entirely
        this.setState(<DelimiterFilterState_SkippingPrefix> {
            id: DelimiterFilterStateId.SkippingPrefix,
            prefix: DbPrefixes.Replay,
        });
        this.inReplayPrefix = true;
        return FILTER_SKIP;
    }
    if (this._reachedMaxKeys()) {
        return FILTER_END;
    this.inReplayPrefix = false;

    /* Skip keys not starting with the prefix or not alphabetically
     * ordered. */
    if (
        (this.prefix && !key.startsWith(this.prefix)) ||
        (typeof this[this.nextContinueMarker] === 'string' &&
            key <= this[this.nextContinueMarker])
    ) {
        return FILTER_SKIP;
    }

    const commonPrefix = this.addCommonPrefixOrContents(key, value);
    if (commonPrefix) {
        // transition into SkippingPrefix state to skip all following keys
        // while they start with the same prefix
        this.setState(<DelimiterFilterState_SkippingPrefix> {
            id: DelimiterFilterStateId.SkippingPrefix,
            prefix: commonPrefix,
        });
    /* Skip version keys (<key><versionIdSeparator><version>) if we already
     * have a master version. */
    const versionIdIndex = key.indexOf(VID_SEP);
    if (versionIdIndex >= 0) {
        key = key.slice(0, versionIdIndex);
        /* - key === this.prvKey is triggered when a master version has
         *   been accepted for this key,
         * - key === this.NextMarker or this.NextContinueToken is triggered
         *   when a listing page ends on an accepted obj and the next page
         *   starts with a version of this object.
         *   In that case prvKey is default set to undefined
         *   in the constructor and comparing to NextMarker is the only
         *   way to know we should not accept this version. This test is
         *   not redundant with the one at the beginning of this function,
         *   we are comparing here the key without the version suffix,
         * - key startsWith the previous NextMarker happens because we set
         *   NextMarker to the common prefix instead of the whole key
         *   value. (TODO: remove this test once ZENKO-1048 is fixed)
         * */
        if (
            key === this.prvKey ||
            key === this[this.nextContinueMarker] ||
            (this.delimiter &&
                key.startsWith(this[this.nextContinueMarker]))
        ) {
            /* master version already filtered */
            return FILTER_SKIP;
        }
    }
    if (Version.isPHD(value)) {
        /* master version is a PHD version, we want to wait for the next
         * one:
         * - Set the prvKey to undefined to not skip the next version,
         * - return accept to avoid skipping the next values in range
         *   (skip scan mechanism in metadata backends like Metadata or
         *   MongoClient). */
        this.prvKey = undefined;
        this.prvPHDKey = key;
        return FILTER_ACCEPT;
    }
    // update the state to start skipping versions of the new master key
    this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
        id: DelimiterMasterFilterStateId.SkippingVersionsV0,
        masterKey: key,
    });
    return FILTER_ACCEPT;
}
keyHandler_NotSkippingPrefixNorVersionsV0(key: string, value: string): FilterReturnValue {
    return this.filter_onNewMasterKeyV0(key, value);
}
filter_onNewMasterKeyV1(key: string, value: string): FilterReturnValue {
    // if this master key is a delete marker, accept it without
    // adding the version to the contents
    if (Version.isDeleteMarker(value)) {
        /* This entry is a deleteMarker which has not been filtered by the
         * version test. Either:
         * - it is a deleteMarker on the master version, we want to SKIP
         *   all the following entries with this key (no master version),
         * - or a deleteMarker following a PHD (setting prvKey to undefined
         *   when an entry is a PHD avoids the skip on version for the
         *   next entry). In that case we expect the master version to
         *   follow. */
        if (key === this.prvPHDKey) {
            this.prvKey = undefined;
            return FILTER_ACCEPT;
        }
    // use base Delimiter's implementation
    return this.keyHandler_NotSkipping_Delimiter(key, value);
}
keyHandler_NotSkippingPrefixNorVersionsV1(key: string, value: string): FilterReturnValue {
    return this.filter_onNewMasterKeyV1(key, value);
}
keyHandler_SkippingVersionsV0(key: string, value: string): FilterReturnValue {
    /* In the SkippingVersionsV0 state, skip all version keys
     * (<key><versionIdSeparator><version>) */
    const versionIdIndex = key.indexOf(VID_SEP);
    if (versionIdIndex !== -1) {
        // version keys count in the building gap weight because they must
        // also be listed until skipped
        if (this._gapBuilding.state === GapBuildingState.Building) {
            this._updateBuildingGap(key);
        }
        this.prvKey = key;
        return FILTER_SKIP;
    }
    return this.filter_onNewMasterKeyV0(key, value);
}
keyHandler_WaitVersionAfterPHDV0(key: string, value: string): FilterReturnValue {
    // After a PHD key is encountered, the next version key of the
    // same object if it exists is the new master key, hence
    // consider it as such and call 'onNewMasterKeyV0' (the test
    // 'masterKey == phdKey' is probably redundant when we already
    // know we have a versioned key, since all objects in v0 have
    // a master key, but keeping it in doubt)
    const { masterKey: phdKey } = <DelimiterMasterFilterState_WaitVersionAfterPHDV0> this.state;
    const versionIdIndex = key.indexOf(VID_SEP);
    if (versionIdIndex !== -1) {
        const masterKey = key.slice(0, versionIdIndex);
        if (masterKey === phdKey) {
            return this.filter_onNewMasterKeyV0(masterKey, value);
        this.prvKey = key;
        if (this.delimiter) {
            // check if the key has the delimiter
            const baseIndex = this.prefix ? this.prefix.length : 0;
            const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
            if (delimiterIndex >= 0) {
                // try to add the prefix to the list
                return this.addCommonPrefix(key, delimiterIndex);
            }
        }
    return this.filter_onNewMasterKeyV0(key, value);
}
keyHandler_SkippingGapV0(key: string, value: string): FilterReturnValue {
    const { gapCache, gapCached } = <GapCachingInfo_GapCached> this._gapCaching;
    if (key <= gapCached.lastKey) {
        return FILTER_SKIP;
    }
    this._gapCaching = {
        state: GapCachingState.UnknownGap,
        gapCache,
    };
    this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
        id: DelimiterMasterFilterStateId.SkippingVersionsV0,
    });
    // Start a gap with weight=0 from the latest skippable key. This allows
    // extending the gap just skipped with a chained gap in case other
    // delete markers are seen after the existing gap is skipped.
    this._createBuildingGap(gapCached.lastKey, 0, gapCached.weight);

    return this.handleKey(key, value);
}
skippingBase(): string | undefined {
    switch (this.state.id) {
    case DelimiterMasterFilterStateId.SkippingVersionsV0:
        const { masterKey } = <DelimiterMasterFilterState_SkippingVersionsV0> this.state;
        return masterKey + inc(VID_SEP);

    case DelimiterMasterFilterStateId.SkippingGapV0:
        const { gapCached } = <GapCachingInfo_GapCached> this._gapCaching;
        return gapCached.lastKey;

    default:
        return super.skippingBase();
    }
}
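// ---------------------------------------------------------------------------
// Aside: a worked example of the SkippingVersionsV0 computation above,
// assuming for illustration that VID_SEP is '\u0000' and that inc() bumps the
// final character by one code point (both are assumptions for this sketch;
// the real values come from the versioning constants and './tools'):
const VID_SEP_SKETCH = '\u0000';
const incSketch = (s: string) =>
    s.slice(0, -1) + String.fromCharCode(s.charCodeAt(s.length - 1) + 1);
const skipTo = 'photos/cat.jpg' + incSketch(VID_SEP_SKETCH); // 'photos/cat.jpg\u0001'
// every 'photos/cat.jpg\u0000<versionId>' key sorts before `skipTo`, so the
// listing can jump over the whole version set of the current master key
// ---------------------------------------------------------------------------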
result(): ResultObject {
    this._cutBuildingGap();
    return super.result();
}
_checkRefreshedBuildingParams(params: GapBuildingParams): GapBuildingParams {
    if (this._refreshedBuildingParams) {
        const newParams = this._refreshedBuildingParams;
        this._refreshedBuildingParams = null;
        return newParams;
    }
    return params;
    return this.addContents(key, value);
}
/**
 * Save the gap being built if allowed (i.e. still within the
 * allocated exposure time window).
 *
 * @return {boolean} - true if the gap was saved, false if we are
 * outside the allocated exposure time window.
 * Filter to apply on each iteration for buckets in v1 format,
 * based on:
 * - prefix
 * - delimiter
 * - maxKeys
 * The marker is being handled directly by levelDB
 * @param {Object} obj - The key and value of the element
 * @param {String} obj.key - The key of the element
 * @param {String} obj.value - The value of the element
 * @return {number} - indicates if iteration should continue
 */
_saveBuildingGap(): boolean {
    const { gapCache, params, gap, gapWeight } =
        <GapBuildingInfo_Building> this._gapBuilding;
    const totalElapsed = Date.now() - params.initTimestamp;
    if (totalElapsed >= gapCache.exposureDelayMs) {
        this._gapBuilding = {
            state: GapBuildingState.Expired,
        };
        this._refreshedBuildingParams = null;
        return false;
    }
    const { firstKey, lastKey, weight } = gap;
    gapCache.setGap(firstKey, lastKey, weight);
    this._gapBuilding = {
        state: GapBuildingState.Building,
        gapCache,
        params: this._checkRefreshedBuildingParams(params),
        gap: {
            firstKey: gap.lastKey,
            lastKey: gap.lastKey,
            weight: 0,
        },
        gapWeight,
    };
    return true;
filterV1(obj: { key: string; value: string }): number {
    // Filtering master keys in v1 is simply listing the master
    // keys, as the state of version keys does not change the
    // result, so we can use the Delimiter method directly.
    return super.filter(obj);
}
/**
 * Create a new gap to be extended afterwards
 *
 * @param {string} newKey - gap's first key
 * @param {number} startWeight - initial weight of the building gap (usually 0 or 1)
 * @param {number} [cachedWeight] - if continuing a cached gap, weight of the existing
 * cached portion
 * @return {undefined}
 */
_createBuildingGap(newKey: string, startWeight: number, cachedWeight?: number): void {
    if (this._gapBuilding.state === GapBuildingState.NotBuilding) {
        const { gapCache, params } = <GapBuildingInfo_NotBuilding> this._gapBuilding;
        this._gapBuilding = {
            state: GapBuildingState.Building,
            gapCache,
            params: this._checkRefreshedBuildingParams(params),
            gap: {
                firstKey: newKey,
                lastKey: newKey,
                weight: startWeight,
            },
            gapWeight: (cachedWeight || 0) + startWeight,
        };
skippingBase() {
    if (this[this.nextContinueMarker]) {
        // next marker or next continuation token:
        // - foo/ : skipping foo/
        // - foo  : skipping foo.
        const index = this[this.nextContinueMarker].lastIndexOf(
            this.delimiter
        );
        if (index === this[this.nextContinueMarker].length - 1) {
            return this[this.nextContinueMarker];
        }
        return this[this.nextContinueMarker] + VID_SEP;
    }
    return SKIP_NONE;
}
_updateBuildingGap(newKey: string): void {
    const gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
    const { params, gap } = gapBuilding;
    gap.lastKey = newKey;
    gap.weight += 1;
    gapBuilding.gapWeight += 1;
    // the GapCache API requires updating a gap regularly because it can only split
    // it once per update, by the known last key. In practice the default behavior
    // is to trigger an update after a number of keys that is half the maximum weight.
    // It is also useful for other listings to benefit from the cache sooner.
    if (gapBuilding.gapWeight >= params.minGapWeight &&
        gap.weight >= params.triggerSaveGapWeight) {
        this._saveBuildingGap();
skippingV0() {
    if (this.inReplayPrefix) {
        return DbPrefixes.Replay;
    }
    return this.skippingBase();
}
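// ---------------------------------------------------------------------------
// Aside: a worked example of the weight bookkeeping in _createBuildingGap()
// and _updateBuildingGap() above (the maxGapWeight value is an illustrative
// assumption; 100 and maxGapWeight / 2 are the refreshGapCache() defaults):
const sketchMaxGapWeight = 1000;            // from gapCacheProxy.maxGapWeight
const sketchMinGapWeight = 100;             // refreshGapCache() default
const sketchTriggerSave = Math.trunc(sketchMaxGapWeight / 2); // 500
// Each skipped delete marker or version key adds 1 to both gap.weight (the
// unsaved portion) and gapWeight (the cumulative weight). Once gapWeight has
// reached 100 and the unsaved portion reaches 500 keys, _saveBuildingGap()
// persists [firstKey..lastKey] via gapCache.setGap() and restarts the
// unsaved portion from lastKey with weight 0.
// ---------------------------------------------------------------------------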
_cutBuildingGap(): void {
    if (this._gapBuilding.state === GapBuildingState.Building) {
        let gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
        let { gapCache, params, gap, gapWeight } = gapBuilding;
        // only set gaps that are significant enough in weight and
        // with a non-empty extension
        if (gapWeight >= params.minGapWeight && gap.weight > 0) {
            // we're done if we were not allowed to save the gap
            if (!this._saveBuildingGap()) {
                return;
            }
            // params may have been refreshed, reload them
            gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
            params = gapBuilding.params;
        }
        this._gapBuilding = {
            state: GapBuildingState.NotBuilding,
            gapCache,
            params,
        };
skippingV1() {
    const skipTo = this.skippingBase();
    if (skipTo === SKIP_NONE) {
        return SKIP_NONE;
    }
    return DbPrefixes.Master + skipTo;
}
}
@ -1,202 +0,0 @@
const { DelimiterVersions } = require('./delimiterVersions');
const { FILTER_END, FILTER_SKIP } = require('./tools');

const TRIM_METADATA_MIN_BLOB_SIZE = 10000;

/**
 * Handle object listing with parameters. This extends the base class DelimiterVersions
 * to return the raw non-current versions objects.
 */
class DelimiterNonCurrent extends DelimiterVersions {
    /**
     * Delimiter listing of non-current versions.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.keyMarker - key marker
     * @param {String} parameters.versionIdMarker - version id marker
     * @param {String} parameters.beforeDate - limit the response to keys with stale date older than beforeDate.
     * The "stale date" is the date when a version becomes non-current.
     * @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
     * @param {String} parameters.excludedDataStoreName - exclude dataStoreName matches from the versions
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger, vFormat);

        this.beforeDate = parameters.beforeDate;
        this.excludedDataStoreName = parameters.excludedDataStoreName;
        this.maxScannedLifecycleListingEntries = parameters.maxScannedLifecycleListingEntries;

        // internal state
        this.prevKey = null;
        this.staleDate = null;

        this.scannedKeys = 0;
    }
    getLastModified(value) {
        let lastModified;
        try {
            const v = JSON.parse(value);
            lastModified = v['last-modified'];
        } catch (e) {
            this.logger.warn('could not parse Object Metadata while listing',
                {
                    method: 'getLastModified',
                    err: e.toString(),
                });
        }
        return lastModified;
    }

    // Overwrite keyHandler_SkippingVersions to include the last version from the previous listing.
    // The creation (last-modified) date of this version will be the stale date for the following version.
    // eslint-disable-next-line camelcase
    keyHandler_SkippingVersions(key, versionId, value) {
        if (key === this.keyMarker) {
            // since the nonversioned key equals the marker, there is
            // necessarily a versionId in this key
            const _versionId = versionId;
            if (_versionId < this.versionIdMarker) {
                // skip all versions until marker
                return FILTER_SKIP;
            }
        }
        this.setState({
            id: 1 /* NotSkipping */,
        });
        return this.handleKey(key, versionId, value);
    }
    filter(obj) {
        if (this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries) {
            this.IsTruncated = true;
            this.logger.info('listing stopped due to reaching the maximum scanned entries limit',
                {
                    maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
                    scannedKeys: this.scannedKeys,
                });
            return FILTER_END;
        }
        ++this.scannedKeys;
        return super.filter(obj);
    }
    /**
     * NOTE: Each version of a specific key is sorted from the latest to the oldest
     * thanks to the way version ids are generated.
     * DESCRIPTION: Skip the version if it represents the master key, but keep its last-modified date in memory,
     * which will be the stale date of the following version.
     * The following version is pushed only:
     * - if the "stale date" (picked up from the previous version) is available (JSON.parse has not failed),
     * - if "beforeDate" is not specified, or if it is specified and the "stale date" is older,
     * - if "excludedDataStoreName" is not specified, or if it is specified and the data store name is different.
     * The in-memory "stale date" is then updated with the version's last-modified date to be used for
     * the following version.
     * The process stops and returns the available results if either:
     * - no more metadata key is left to be processed
     * - the listing reaches the maximum number of keys to be returned
     * - the internal timeout is reached
     * @param {String} key - The key to add
     * @param {String} versionId - The version id
     * @param {String} value - The value of the key
     * @return {undefined}
     */
    addVersion(key, versionId, value) {
        this.nextKeyMarker = key;
        this.nextVersionIdMarker = versionId;

        // Skip the version if it represents the current version, but keep its last-modified date,
        // which will be the stale date of the following version.
        const isCurrentVersion = key !== this.prevKey;
        if (isCurrentVersion) {
            this.staleDate = this.getLastModified(value);
            this.prevKey = key;
            return;
        }

        // The following version is pushed only:
        // - if the "stale date" (picked up from the previous version) is available (JSON.parse has not failed),
        // - if "beforeDate" is not specified, or if it is specified and the "stale date" is older,
        // - if "excludedDataStoreName" is not specified, or if it is specified and the data store name is different.
        let lastModified;
        if (this.staleDate && (!this.beforeDate || this.staleDate < this.beforeDate)) {
            const parsedValue = this._parse(value);
            // if parsing fails, skip the key.
            if (parsedValue) {
                const dataStoreName = parsedValue.dataStoreName;
                lastModified = parsedValue['last-modified'];
                if (!this.excludedDataStoreName || dataStoreName !== this.excludedDataStoreName) {
                    const s = this._stringify(parsedValue, this.staleDate);
                    // check that _stringify succeeds to only push objects with a defined staleDate.
                    if (s) {
                        this.Versions.push({ key, value: s });
                        ++this.keys;
                    }
                }
            }
        }

        // The in-memory "stale date" is then updated with the version's last-modified date to be used for
        // the following version.
        this.staleDate = lastModified || this.getLastModified(value);

        return;
    }
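    // -----------------------------------------------------------------------
    // Aside: a worked example of the stale-date propagation in addVersion()
    // above, with illustrative dates. Versions of a key arrive newest first,
    // so each version's stale date is the last-modified date of the version
    // before it:
    //     v3 (current)     last-modified 2024-03-01 -> not pushed, seeds staleDate
    //     v2 (non-current) last-modified 2024-02-01 -> pushed with staleDate 2024-03-01
    //     v1 (non-current) last-modified 2024-01-01 -> pushed with staleDate 2024-02-01
    // With beforeDate = 2024-02-15, only v1 is returned, since only its stale
    // date (2024-02-01) is older than beforeDate.
    // -----------------------------------------------------------------------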
    /**
     * Parses the stringified entry's value and removes the location property if too large.
     * @param {string} s - stringified value
     * @return {object} p - undefined if parsing fails, otherwise it contains the parsed value.
     */
    _parse(s) {
        let p;
        try {
            p = JSON.parse(s);
            if (s.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
                delete p.location;
            }
        } catch (e) {
            this.logger.warn('Could not parse Object Metadata while listing', {
                method: 'DelimiterNonCurrent._parse',
                err: e.toString(),
            });
        }
        return p;
    }
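    // -----------------------------------------------------------------------
    // Aside: the trimming rule in _parse() above, as a standalone sketch.
    // Metadata blobs of TRIM_METADATA_MIN_BLOB_SIZE (10000) characters or
    // more get their 'location' property dropped so listing results stay
    // small (the sample object is illustrative):
    //     const blob = JSON.stringify({
    //         'last-modified': '2024-01-01',
    //         location: 'x'.repeat(20000), // oversized location stand-in
    //     });
    //     const parsed = JSON.parse(blob);
    //     if (blob.length >= 10000) {
    //         delete parsed.location; // blob is large: trim it
    //     }
    // -----------------------------------------------------------------------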
    _stringify(parsedMD, staleDate) {
        const p = parsedMD;
        let s = undefined;
        p.staleDate = staleDate;
        try {
            s = JSON.stringify(p);
        } catch (e) {
            this.logger.warn('could not stringify Object Metadata while listing', {
                method: 'DelimiterNonCurrent._stringify',
                err: e.toString(),
            });
        }
        return s;
    }
    result() {
        const { Versions, IsTruncated, NextKeyMarker, NextVersionIdMarker } = super.result();

        const result = {
            Contents: Versions,
            IsTruncated,
        };

        if (NextKeyMarker) {
            result.NextKeyMarker = NextKeyMarker;
        }

        if (NextVersionIdMarker) {
            result.NextVersionIdMarker = NextVersionIdMarker;
        }

        return result;
    }
}
module.exports = { DelimiterNonCurrent };
@ -1,204 +0,0 @@
const DelimiterVersions = require('./delimiterVersions').DelimiterVersions;
const { FILTER_END } = require('./tools');
const TRIM_METADATA_MIN_BLOB_SIZE = 10000;
/**
 * Handle object listing with parameters. This extends the base class DelimiterVersions
 * to return the orphan delete markers. Orphan delete markers are also
 * referred to as expired object delete markers.
 * They are delete markers with zero noncurrent versions.
 */
class DelimiterOrphanDeleteMarker extends DelimiterVersions {
    /**
     * Delimiter listing of orphan delete markers.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.beforeDate - limit the response to keys older than beforeDate
     * @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        const {
            marker,
            maxKeys,
            prefix,
            beforeDate,
            maxScannedLifecycleListingEntries,
        } = parameters;

        const versionParams = {
            // The orphan delete marker logic uses the term 'marker' instead of 'keyMarker',
            // as the latter could suggest the presence of a 'versionIdMarker'.
            keyMarker: marker,
            maxKeys,
            prefix,
        };
        super(versionParams, logger, vFormat);

        this.maxScannedLifecycleListingEntries = maxScannedLifecycleListingEntries;
        this.beforeDate = beforeDate;
        // this.prevKeyName is used as a marker for the next listing when the current one reaches its entry limit.
        // We cannot rely on this.keyName, as it contains the name of the current key.
        // In the event of a listing interruption due to reaching the maximum scanned entries,
        // relying on this.keyName would cause the next listing to skip the current key because S3 starts
        // listing after the marker.
        this.prevKeyName = null;
        this.keyName = null;
        this.value = null;
        this.scannedKeys = 0;
    }
    _reachedMaxKeys() {
        if (this.keys >= this.maxKeys) {
            return true;
        }
        return false;
    }
    _addOrphan() {
        const parsedValue = this._parse(this.value);
        // if parsing fails, skip the key.
        if (parsedValue) {
            const lastModified = parsedValue['last-modified'];
            const isDeleteMarker = parsedValue.isDeleteMarker;
            // We then check if the orphan version is a delete marker and if it is older than the "beforeDate"
            if ((!this.beforeDate || (lastModified && lastModified < this.beforeDate)) && isDeleteMarker) {
                // Prefer returning untrimmed data rather than stopping the service in case of stringify failure.
                const s = this._stringify(parsedValue) || this.value;
                this.Versions.push({ key: this.keyName, value: s });
                this.nextKeyMarker = this.keyName;
                ++this.keys;
            }
        }
    }
    /**
     * Parses the stringified entry's value and removes the location property if too large.
     * @param {string} s - stringified value
     * @return {object} p - undefined if parsing fails, otherwise it contains the parsed value.
     */
    _parse(s) {
        let p;
        try {
            p = JSON.parse(s);
            if (s.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
                delete p.location;
            }
        } catch (e) {
            this.logger.warn('Could not parse Object Metadata while listing', {
                method: 'DelimiterOrphanDeleteMarker._parse',
                err: e.toString(),
            });
        }
        return p;
    }
    _stringify(value) {
        const p = value;
        let s = undefined;
        try {
            s = JSON.stringify(p);
        } catch (e) {
            this.logger.warn('could not stringify Object Metadata while listing',
                {
                    method: 'DelimiterOrphanDeleteMarker._stringify',
                    err: e.toString(),
                });
        }
        return s;
    }
    /**
     * The purpose of _isMaxScannedEntriesReached is to restrict the number of scanned entries,
     * thus controlling resource overhead (CPU...).
     * @return {boolean} isMaxScannedEntriesReached - true if the maximum limit on the number
     * of entries scanned has been reached, false otherwise.
     */
    _isMaxScannedEntriesReached() {
        return this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries;
    }
    filter(obj) {
        if (this._isMaxScannedEntriesReached()) {
            this.nextKeyMarker = this.prevKeyName;
            this.IsTruncated = true;
            this.logger.info('listing stopped due to reaching the maximum scanned entries limit',
                {
                    maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
                    scannedKeys: this.scannedKeys,
                });
            return FILTER_END;
        }
        ++this.scannedKeys;
        return super.filter(obj);
    }
    /**
     * NOTE: Each version of a specific key is sorted from the latest to the oldest
     * thanks to the way version ids are generated.
     * DESCRIPTION: For a given key, the latest version is kept in memory since it is the current version.
     * If the following version references a new key, it means that the previous one was an orphan version.
     * We then check if the orphan version is a delete marker and if it is older than the "beforeDate".
     * The process stops and returns the available results if either:
     * - no more metadata key is left to be processed
     * - the listing reaches the maximum number of keys to be returned
     * - the internal timeout is reached
     * NOTE: we cannot leverage MongoDB to list keys older than "beforeDate"
     * because then we would not be able to assess their orphanage.
     * @param {String} key - The object key.
     * @param {String} versionId - The object version id.
     * @param {String} value - The value of the key
     * @return {undefined}
     */
    addVersion(key, versionId, value) {
        // For a given key, the youngest version is kept in memory since it represents the current version.
        if (key !== this.keyName) {
            // If this.value is defined, it means that the <this.keyName, this.value> pair is "allowed" to be an orphan.
            if (this.value) {
                this._addOrphan();
            }
            this.prevKeyName = this.keyName;
            this.keyName = key;
            this.value = value;

            return;
        }

        // If the key is not the current version, we can skip it in the next listing
        // in the case where the current listing is interrupted due to reaching the maximum scanned entries.
        this.prevKeyName = key;
        this.keyName = key;
        this.value = null;

        return;
    }
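    // -----------------------------------------------------------------------
    // Aside: a worked trace of the orphan detection in addVersion() above,
    // over a flat iteration of (key, versionId) entries (names illustrative):
    //     ('a', v1, delete marker) -> new key 'a': kept as candidate
    //     ('b', v1, delete marker) -> new key 'b': 'a' had no other versions,
    //                                 so 'a' was an orphan -> _addOrphan() ran
    //     ('b', v2, old version)   -> same key 'b': it has noncurrent
    //                                 versions, so not an orphan -> cleared
    //     end of iteration         -> result() flushes the last candidate,
    //                                 if any (subject to the beforeDate and
    //                                 delete-marker checks in _addOrphan())
    // -----------------------------------------------------------------------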
    result() {
        // Only check for a remaining last orphan delete marker if the listing is not interrupted.
        // This will help avoid false positives.
        if (!this._isMaxScannedEntriesReached()) {
            // The following check makes sure the last orphan delete marker is not forgotten.
            if (this.keys < this.maxKeys) {
                if (this.value) {
                    this._addOrphan();
                }
            // The following makes sure that if maxKeys is reached, IsTruncated is set to true.
            // We moved the "IsTruncated" logic from _reachedMaxKeys to make sure we take into account the last entity
            // if the listing is truncated right before the last entity and the last entity is an orphan delete marker.
            } else {
                this.IsTruncated = this.maxKeys > 0;
            }
        }

        const result = {
            Contents: this.Versions,
            IsTruncated: this.IsTruncated,
        };

        if (this.IsTruncated) {
            result.NextMarker = this.nextKeyMarker;
        }

        return result;
    }
}

module.exports = { DelimiterOrphanDeleteMarker };
@ -1,61 +1,17 @@
'use strict'; // eslint-disable-line strict

const Extension = require('./Extension').default;

import { Delimiter } from './delimiter';
import { Version } from '../../versioning/Version';
import { VersioningConstants as VSConst } from '../../versioning/constants';
import {
    FilterState,
    FilterReturnValue,
} from './delimiter';

const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } =
    require('./tools');
import {
    inc,
    FILTER_END,
    FILTER_ACCEPT,
    FILTER_SKIP,
    SKIP_NONE,
} from './tools';

const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

export const enum DelimiterVersionsFilterStateId {
    NotSkipping = 1,
    SkippingPrefix = 2,
    SkippingVersions = 3,
};

export interface DelimiterVersionsFilterState_NotSkipping extends FilterState {
    id: DelimiterVersionsFilterStateId.NotSkipping,
};

export interface DelimiterVersionsFilterState_SkippingPrefix extends FilterState {
    id: DelimiterVersionsFilterStateId.SkippingPrefix,
    prefix: string;
};

export interface DelimiterVersionsFilterState_SkippingVersions extends FilterState {
    id: DelimiterVersionsFilterStateId.SkippingVersions,
    gt: string;
};

type KeyHandler = (key: string, versionId: string | undefined, value: string) => FilterReturnValue;

type ResultObject = {
    CommonPrefixes: string[],
    Versions: {
        key: string;
        value: string;
        versionId: string;
    }[];
    IsTruncated: boolean;
    Delimiter ?: string;
    NextKeyMarker ?: string;
    NextVersionIdMarker ?: string;
};
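// ---------------------------------------------------------------------------
// Aside: an illustrative ResultObject value matching the type above (all
// field values are made up for the example):
const sampleResult: ResultObject = {
    CommonPrefixes: ['photos/'],
    Versions: [
        { key: 'readme.txt', value: '{"last-modified":"2024-01-01"}', versionId: '0123' },
    ],
    IsTruncated: true,
    Delimiter: '/',
    NextKeyMarker: 'readme.txt',
    NextVersionIdMarker: '0123',
};
// ---------------------------------------------------------------------------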
type GenMDParamsItem = {
    gt ?: string,
    gte ?: string,
    lt ?: string,
};

/**
 * Handle object listing with parameters
 *
@ -68,101 +24,66 @@ type GenMDParamsItem = {
 * @prop {String|undefined} prefix - prefix per amazon format
 * @prop {Number} maxKeys - number of keys to list
 */
export class DelimiterVersions extends Extension {

    state: FilterState;
    keyHandlers: { [id: number]: KeyHandler };
export class DelimiterVersions extends Delimiter {
    CommonPrefixes: string[];
    Contents: string[];
    IsTruncated: boolean;
    NextMarker?: string;
    keys: number;
    delimiter?: string;
    prefix?: string;
    maxKeys: number;

    constructor(parameters, logger, vFormat) {
        super(parameters, logger);
        // original listing parameters
        this.delimiter = parameters.delimiter;
        this.prefix = parameters.prefix;
        this.maxKeys = parameters.maxKeys || 1000;
        super(parameters, logger, vFormat);
        // specific to version listing
        this.keyMarker = parameters.keyMarker;
        this.versionIdMarker = parameters.versionIdMarker;
        // internal state
        this.masterKey = undefined;
        this.masterVersionId = undefined;
        this.nullKey = null;
        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
        // listing results
        this.CommonPrefixes = [];
        this.Versions = [];
        this.IsTruncated = false;
        this.nextKeyMarker = parameters.keyMarker;
        this.nextVersionIdMarker = undefined;
        this.NextMarker = parameters.keyMarker;
        this.NextVersionIdMarker = undefined;
        this.inReplayPrefix = false;

        this.keyHandlers = {};

        Object.assign(this, {
        Object.assign(
            this,
            {
                [BucketVersioningKeyFormat.v0]: {
                    genMDParams: this.genMDParamsV0,
                    getObjectKey: this.getObjectKeyV0,
                    filter: this.filterV0,
                    skipping: this.skippingV0,
                },
                [BucketVersioningKeyFormat.v1]: {
                    genMDParams: this.genMDParamsV1,
                    getObjectKey: this.getObjectKeyV1,
                    filter: this.filterV1,
                    skipping: this.skippingV1,
                },
            }[this.vFormat]);

        if (this.vFormat === BucketVersioningKeyFormat.v0) {
            this.setKeyHandler(
                DelimiterVersionsFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingV0.bind(this));
        } else {
            this.setKeyHandler(
                DelimiterVersionsFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingV1.bind(this));
        }
        this.setKeyHandler(
            DelimiterVersionsFilterStateId.SkippingPrefix,
            this.keyHandler_SkippingPrefix.bind(this));

        this.setKeyHandler(
            DelimiterVersionsFilterStateId.SkippingVersions,
            this.keyHandler_SkippingVersions.bind(this));

        if (this.versionIdMarker) {
            this.state = <DelimiterVersionsFilterState_SkippingVersions> {
                id: DelimiterVersionsFilterStateId.SkippingVersions,
                gt: `${this.keyMarker}${VID_SEP}${this.versionIdMarker}`,
            };
        } else {
            this.state = <DelimiterVersionsFilterState_NotSkipping> {
                id: DelimiterVersionsFilterStateId.NotSkipping,
            };
        }
            }[this.vFormat]
        );
    }
    genMDParamsV0() {
        const params: GenMDParamsItem = {};
        if (this.prefix) {
            params.gte = this.prefix;
            params.lt = inc(this.prefix);
        const params = {};
        if (this.parameters.prefix) {
            params.gte = this.parameters.prefix;
            params.lt = inc(this.parameters.prefix);
        }
        if (this.keyMarker && this.delimiter) {
            const commonPrefix = this.getCommonPrefix(this.keyMarker);
            if (commonPrefix) {
                const afterPrefix = inc(commonPrefix);
                if (!params.gte || afterPrefix > params.gte) {
                    params.gte = afterPrefix;
        if (this.parameters.keyMarker) {
            if (params.gte && params.gte > this.parameters.keyMarker) {
                return params;
            }
        }
        if (this.keyMarker && (!params.gte || this.keyMarker >= params.gte)) {
            delete params.gte;
            if (this.versionIdMarker) {
                // start from the beginning of versions so we can
                // check if there's a null key and fetch it
                // (afterwards, we can skip the rest of versions until
                // we reach versionIdMarker)
                params.gte = `${this.keyMarker}${VID_SEP}`;
            if (this.parameters.versionIdMarker) {
                // versionIdMarker should always come with keyMarker
                // but may not be the other way around
                params.gt =
                    this.parameters.keyMarker +
                    VID_SEP +
                    this.parameters.versionIdMarker;
            } else {
                params.gt = `${this.keyMarker}${inc(VID_SEP)}`;
                params.gt = inc(this.parameters.keyMarker + VID_SEP);
            }
        }
        return params;
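// ---------------------------------------------------------------------------
// Aside: example outputs of the two genMDParamsV0() variants above, for
// keyMarker = 'foo' and versionIdMarker = '0123' (writing VID_SEP as '\u0000'
// for readability):
// - newer variant: { gte: 'foo\u0000' } -- start at the first version of
//   'foo' so a possible null key is seen; versions up to the marker are then
//   skipped by the SkippingVersions state.
// - older variant: { gt: 'foo\u00000123' } -- jump directly past the marker
//   in the database range itself.
// Without a versionIdMarker, both variants skip the whole version set of
// 'foo' ('gt: "foo" + inc(VID_SEP)' vs 'gt: inc("foo" + VID_SEP)', which
// denote the same boundary when inc() bumps the final character).
// ---------------------------------------------------------------------------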
@ -171,41 +92,44 @@ export class DelimiterVersions extends Extension {
    genMDParamsV1() {
        // return an array of two listing params sets to ask for
        // synchronized listing of M and V ranges
        const v0Params: GenMDParamsItem = this.genMDParamsV0();
        const mParams: GenMDParamsItem = {};
        const vParams: GenMDParamsItem = {};
        if (v0Params.gt) {
            mParams.gt = `${DbPrefixes.Master}${v0Params.gt}`;
            vParams.gt = `${DbPrefixes.Version}${v0Params.gt}`;
        } else if (v0Params.gte) {
            mParams.gte = `${DbPrefixes.Master}${v0Params.gte}`;
            vParams.gte = `${DbPrefixes.Version}${v0Params.gte}`;
        const params = [{}, {}];
        if (this.parameters.prefix) {
            params[0].gte = DbPrefixes.Master + this.parameters.prefix;
            params[0].lt = DbPrefixes.Master + inc(this.parameters.prefix);
            params[1].gte = DbPrefixes.Version + this.parameters.prefix;
            params[1].lt = DbPrefixes.Version + inc(this.parameters.prefix);
        } else {
            mParams.gte = DbPrefixes.Master;
            vParams.gte = DbPrefixes.Version;
            params[0].gte = DbPrefixes.Master;
            params[0].lt = inc(DbPrefixes.Master); // stop after the last master key
            params[1].gte = DbPrefixes.Version;
            params[1].lt = inc(DbPrefixes.Version); // stop after the last version key
        }
        if (v0Params.lt) {
            mParams.lt = `${DbPrefixes.Master}${v0Params.lt}`;
            vParams.lt = `${DbPrefixes.Version}${v0Params.lt}`;
        if (this.parameters.keyMarker) {
            if (
                params[1].gte <=
                    DbPrefixes.Version + this.parameters.keyMarker
            ) {
                delete params[0].gte;
                delete params[1].gte;
                params[0].gt =
                    DbPrefixes.Master +
                    inc(this.parameters.keyMarker + VID_SEP);
                if (this.parameters.versionIdMarker) {
                    // versionIdMarker should always come with keyMarker
                    // but may not be the other way around
                    params[1].gt =
                        DbPrefixes.Version +
                        this.parameters.keyMarker +
                        VID_SEP +
                        this.parameters.versionIdMarker;
                } else {
            mParams.lt = inc(DbPrefixes.Master);
            vParams.lt = inc(DbPrefixes.Version);
                    params[1].gt =
                        DbPrefixes.Version +
                        inc(this.parameters.keyMarker + VID_SEP);
                }
        return [mParams, vParams];
    }

    /**
     * check if the max keys count has been reached and set the
     * final state of the result if it is the case
     * @return {Boolean} - indicates if the iteration has to stop
     */
    _reachedMaxKeys(): boolean {
        if (this.keys >= this.maxKeys) {
            // In cases of maxKeys <= 0 -> IsTruncated = false
            this.IsTruncated = this.maxKeys > 0;
            return true;
        }
        return false;
        return params;
    }

    /**
@ -219,122 +143,44 @@ export class DelimiterVersions extends Extension {
     * * -1 if master key < version key
     * * 1 if master key > version key
     */
    compareObjects(masterObj, versionObj) {
    compareObjects(masterObj, versionObj): number {
        const masterKey = masterObj.key.slice(DbPrefixes.Master.length);
        const versionKey = versionObj.key.slice(DbPrefixes.Version.length);
        return masterKey < versionKey ? -1 : 1;
    }

    /**
     * Parse a listing key into its nonversioned key and version ID components
     *
     * @param {string} key - full listing key
     * @return {object} obj
     * @return {string} obj.key - nonversioned part of key
     * @return {string} [obj.versionId] - version ID in the key
     */
    parseKey(fullKey: string): { key: string, versionId ?: string } {
        const versionIdIndex = fullKey.indexOf(VID_SEP);
        if (versionIdIndex === -1) {
            return { key: fullKey };
        }
        const nonversionedKey: string = fullKey.slice(0, versionIdIndex);
        const versionId: string = fullKey.slice(versionIdIndex + 1);
        return { key: nonversionedKey, versionId };
    }

    /**
     * Include a key in the listing output, in the Versions or CommonPrefix result
     *
     * @param {string} key - key (without version ID)
     * @param {string} versionId - version ID
     * @param {string} value - metadata value
     * @return {undefined}
     */
    addKey(key: string, versionId: string, value: string) {
        // add the subprefix to the common prefixes if the key has the delimiter
        const commonPrefix = this.getCommonPrefix(key);
        if (commonPrefix) {
            this.addCommonPrefix(commonPrefix);
            // transition into SkippingPrefix state to skip all following keys
            // while they start with the same prefix
            this.setState(<DelimiterVersionsFilterState_SkippingPrefix> {
                id: DelimiterVersionsFilterStateId.SkippingPrefix,
                prefix: commonPrefix,
            });
        } else {
            this.addVersion(key, versionId, value);
        }
    }
    /**
     * Add a (key, versionId, value) tuple to the listing.
     * Set the NextMarker to the current key
     * Increment the keys counter
     * @param {String} key - The key to add
     * @param {String} versionId - versionId
     * @param {String} value - The value of the key
     * @return {undefined}
     * @param {object} obj - the entry to add to the listing result
     * @param {String} obj.key - The key to add
     * @param {String} obj.versionId - versionId
     * @param {String} obj.value - The value of the key
     * @return {Boolean} - indicates if iteration should continue
     */
    addVersion(key: string, versionId: string, value: string) {
        this.Versions.push({
            key,
            versionId,
            value: this.trimMetadata(value),
    addContents(obj: {
        key: string;
        versionId: string;
        value: string;
    }): boolean {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        this.Contents.push({
            key: obj.key,
            value: this.trimMetadata(obj.value),
            versionId: obj.versionId,
        });
        this.nextKeyMarker = key;
        this.nextVersionIdMarker = versionId;
        this.NextMarker = obj.key;
        this.NextVersionIdMarker = obj.versionId;
        ++this.keys;
    }
    getCommonPrefix(key: string): string | undefined {
        if (!this.delimiter) {
            return undefined;
        }
        const baseIndex = this.prefix ? this.prefix.length : 0;
        const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
        if (delimiterIndex === -1) {
            return undefined;
        }
        return key.substring(0, delimiterIndex + this.delimiter.length);
        return FILTER_ACCEPT;
    }

    /**
     * Add a Common Prefix in the list
     * @param {String} commonPrefix - common prefix to add
     * @return {undefined}
     */
    addCommonPrefix(commonPrefix: string): void {
        // add the new prefix to the list
        this.CommonPrefixes.push(commonPrefix);
        ++this.keys;
        this.nextKeyMarker = commonPrefix;
        this.nextVersionIdMarker = undefined;
    }

    /**
     * Cache the current null key, to save it for outputting it later at
     * the correct position
     *
     * @param {String} key - nonversioned key of the null key
     * @param {String} versionId - real version ID of the null key
     * @param {String} value - value of the null key
     * @return {undefined}
     */
    cacheNullKey(key: string, versionId: string, value: string): void {
        this.nullKey = { key, versionId, value };
    }
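    // -----------------------------------------------------------------------
    // Aside: why cacheNullKey() defers output. In v0 format the null key is
    // stored with an empty version ID position, so the iterator yields it
    // before the regular version keys of the same object, while its real
    // version ID may sort between them. filter() caches it and re-emits it
    // once it reaches the first key whose version ID sorts after the null
    // key's real version ID, preserving version order in the output.
    // Illustrative trace (made-up version IDs): the iterator yields the null
    // key of 'doc' (real version ID '0500'), then versions '0300' and '0700';
    // the output order is '0300', the cached null key ('0500'), then '0700'.
    // -----------------------------------------------------------------------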
    getObjectKeyV0(obj: { key: string }): string {
        return obj.key;
    }
    getObjectKeyV1(obj: { key: string }): string {
        return obj.key.slice(DbPrefixes.Master.length);
    }

    /**
     * Filter to apply on each iteration, based on:
     * Filter to apply on each iteration if bucket is in v0
     * versioning key format, based on:
     * - prefix
     * - delimiter
     * - maxKeys
@ -344,147 +190,100 @@ export class DelimiterVersions extends Extension {
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filter(obj: { key: string, value: string }): FilterReturnValue {
        const key = this.getObjectKey(obj);
        const value = obj.value;

        const { key: nonversionedKey, versionId: keyVersionId } = this.parseKey(key);
        if (this.nullKey) {
            if (this.nullKey.key !== nonversionedKey
                || this.nullKey.versionId < <string> keyVersionId) {
                this.handleKey(
                    this.nullKey.key, this.nullKey.versionId, this.nullKey.value);
                this.nullKey = null;
            }
        }
        if (keyVersionId === '') {
            // null key
            this.cacheNullKey(nonversionedKey, Version.from(value).getVersionId(), value);
            if (this.state.id === DelimiterVersionsFilterStateId.SkippingVersions) {
    filterV0(obj: { key: string; value: string }): number {
        if (obj.key.startsWith(DbPrefixes.Replay)) {
            this.inReplayPrefix = true;
            return FILTER_SKIP;
        }
        this.inReplayPrefix = false;

        if (Version.isPHD(obj.value)) {
            // return accept to avoid skipping the next values in range
            return FILTER_ACCEPT;
        }
        return this.handleKey(nonversionedKey, keyVersionId, value);
        return this.filterCommon(obj.key, obj.value);
    }

    setState(state: FilterState): void {
        this.state = state;
    /**
     * Filter to apply on each iteration if bucket is in v1
     * versioning key format, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filterV1(obj: { key: string; value: string }): number {
        // this function receives both M and V keys, but their prefix
        // length is the same so we can remove their prefix without
        // looking at the type of key
        return this.filterCommon(
            obj.key.slice(DbPrefixes.Master.length),
            obj.value
        );
    }
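    // -----------------------------------------------------------------------
    // Aside: a worked example of the v1 prefix stripping in filterV1() above,
    // pretending for readability that the master and version prefixes are the
    // single characters 'M' and 'V' (the real values come from DbPrefixes;
    // the invariant the code relies on is only that both have equal length):
    //     raw master key  'Mphotos/cat.jpg'         -> 'photos/cat.jpg'
    //     raw version key 'Vphotos/cat.jpg\0<id>'   -> 'photos/cat.jpg\0<id>'
    // Both kinds of keys lose the same number of leading characters, so
    // filterCommon() can treat them uniformly.
    // -----------------------------------------------------------------------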
    setKeyHandler(stateId: number, keyHandler: KeyHandler): void {
        this.keyHandlers[stateId] = keyHandler;
    }

    handleKey(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        return this.keyHandlers[this.state.id](key, versionId, value);
    }
    keyHandler_NotSkippingV0(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        if (key.startsWith(DbPrefixes.Replay)) {
            // skip internal replay prefix entirely
            this.setState(<DelimiterVersionsFilterState_SkippingPrefix> {
                id: DelimiterVersionsFilterStateId.SkippingPrefix,
                prefix: DbPrefixes.Replay,
            });
    filterCommon(key, value) {
        if (this.prefix && !key.startsWith(this.prefix)) {
            return FILTER_SKIP;
        }
        if (Version.isPHD(value)) {
            return FILTER_ACCEPT;
        }
        return this.filter_onNewKey(key, versionId, value);
    }
    keyHandler_NotSkippingV1(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        // NOTE: this check on PHD is only useful for Artesca, S3C
        // does not use PHDs in V1 format
        if (Version.isPHD(value)) {
            return FILTER_ACCEPT;
        }
        return this.filter_onNewKey(key, versionId, value);
    }
filter_onNewKey(key: string, versionId: string | undefined, value: string): FilterReturnValue {
|
||||
if (this._reachedMaxKeys()) {
|
||||
return FILTER_END;
|
||||
}
|
||||
if (versionId === undefined) {
|
||||
let nonversionedKey;
|
||||
let versionId = undefined;
|
||||
const versionIdIndex = key.indexOf(VID_SEP);
|
||||
if (versionIdIndex < 0) {
|
||||
nonversionedKey = key;
|
||||
this.masterKey = key;
|
||||
this.masterVersionId = Version.from(value).getVersionId() || 'null';
|
||||
this.addKey(this.masterKey, this.masterVersionId, value);
|
||||
versionId = this.masterVersionId;
|
||||
} else {
|
||||
if (this.masterKey === key && this.masterVersionId === versionId) {
|
||||
// do not add a version key if it is the master version
|
||||
return FILTER_ACCEPT;
|
||||
}
|
||||
this.addKey(key, versionId, value);
|
||||
}
|
||||
return FILTER_ACCEPT;
|
||||
}
|
||||
|
||||
keyHandler_SkippingPrefix(key: string, versionId: string | undefined, value: string): FilterReturnValue {
|
||||
const { prefix } = <DelimiterVersionsFilterState_SkippingPrefix> this.state;
|
||||
if (key.startsWith(prefix)) {
|
||||
nonversionedKey = key.slice(0, versionIdIndex);
|
||||
versionId = key.slice(versionIdIndex + 1);
|
||||
// skip a version key if it is the master version
|
||||
if (
|
||||
this.masterKey === nonversionedKey &&
|
||||
this.masterVersionId === versionId
|
||||
) {
|
||||
return FILTER_SKIP;
|
||||
}
|
||||
this.setState(<DelimiterVersionsFilterState_NotSkipping> {
|
||||
id: DelimiterVersionsFilterStateId.NotSkipping,
|
||||
});
|
||||
return this.handleKey(key, versionId, value);
|
||||
this.masterKey = undefined;
|
||||
this.masterVersionId = undefined;
|
||||
}
|
||||
|
||||
keyHandler_SkippingVersions(key: string, versionId: string | undefined, value: string): FilterReturnValue {
|
||||
if (key === this.keyMarker) {
|
||||
// since the nonversioned key equals the marker, there is
|
||||
// necessarily a versionId in this key
|
||||
const _versionId = <string> versionId;
|
||||
if (_versionId < this.versionIdMarker) {
|
||||
// skip all versions until marker
|
||||
return FILTER_SKIP;
|
||||
}
|
||||
if (_versionId === this.versionIdMarker) {
|
||||
// nothing left to skip, so return ACCEPT, but don't add this version
|
||||
return FILTER_ACCEPT;
|
||||
if (this.delimiter) {
|
||||
const baseIndex = this.prefix ? this.prefix.length : 0;
|
||||
const delimiterIndex = nonversionedKey.indexOf(
|
||||
this.delimiter,
|
||||
baseIndex
|
||||
);
|
||||
if (delimiterIndex >= 0) {
|
||||
return this.addCommonPrefix(nonversionedKey, delimiterIndex);
|
||||
}
|
||||
}
|
||||
this.setState(<DelimiterVersionsFilterState_NotSkipping> {
|
||||
id: DelimiterVersionsFilterStateId.NotSkipping,
|
||||
});
|
||||
return this.handleKey(key, versionId, value);
|
||||
}
|
||||
|
||||
skippingBase(): string | undefined {
|
||||
switch (this.state.id) {
|
||||
case DelimiterVersionsFilterStateId.SkippingPrefix:
|
||||
const { prefix } = <DelimiterVersionsFilterState_SkippingPrefix> this.state;
|
||||
return inc(prefix);
|
||||
|
||||
case DelimiterVersionsFilterStateId.SkippingVersions:
|
||||
const { gt } = <DelimiterVersionsFilterState_SkippingVersions> this.state;
|
||||
// the contract of skipping() is to return the first key
|
||||
// that can be skipped to, so adding a null byte to skip
|
||||
// over the existing versioned key set in 'gt'
|
||||
return `${gt}\0`;
|
||||
|
||||
default:
|
||||
return SKIP_NONE;
|
||||
}
|
||||
return this.addContents({ key: nonversionedKey, value, versionId });
|
||||
}
|
||||
|
||||
skippingV0() {
|
||||
return this.skippingBase();
|
||||
if (this.inReplayPrefix) {
|
||||
return DbPrefixes.Replay;
|
||||
}
|
||||
if (this.NextMarker) {
|
||||
const index = this.NextMarker.lastIndexOf(this.delimiter);
|
||||
if (index === this.NextMarker.length - 1) {
|
||||
return this.NextMarker;
|
||||
}
|
||||
}
|
||||
return SKIP_NONE;
|
||||
}
|
||||
|
||||
skippingV1() {
|
||||
const skipTo = this.skippingBase();
|
||||
if (skipTo === SKIP_NONE) {
|
||||
const skipV0 = this.skippingV0();
|
||||
if (skipV0 === SKIP_NONE) {
|
||||
return SKIP_NONE;
|
||||
}
|
||||
// skip to the same object key in both M and V range listings
|
||||
return [
|
||||
`${DbPrefixes.Master}${skipTo}`,
|
||||
`${DbPrefixes.Version}${skipTo}`,
|
||||
];
|
||||
return [DbPrefixes.Master + skipV0, DbPrefixes.Version + skipV0];
|
||||
}
|
||||
|
||||
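The V0/V1 split above matters for skipping: in the V1 key format every object key is stored twice (once under the master prefix, once under the version prefix), so a single logical skip target expands into two range bounds. A minimal hedged sketch of that expansion; the prefix strings are illustrative placeholders, not the actual `DbPrefixes` values:

```ts
// Hedged sketch of the V1 skip expansion; 'M::'/'V::' are illustrative,
// not the real DbPrefixes constants from Arsenal.
const Master = 'M::';
const Version = 'V::';

function expandSkipV1(skipTo: string | undefined): string[] | undefined {
    if (skipTo === undefined) {
        return undefined; // SKIP_NONE: nothing to jump over
    }
    // the same object key must be skipped in both M and V range listings
    return [`${Master}${skipTo}`, `${Version}${skipTo}`];
}

expandSkipV1('photos0'); // => ['M::photos0', 'V::photos0']
```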

    /**
@@ -494,42 +293,19 @@ export class DelimiterVersions extends Extension {
     * @return {Object} - following amazon format
     */
    result() {
        // Add the last null key if still in cache (when it is the
        // last version of the last key)
        //
        // NOTE: _reachedMaxKeys sets IsTruncated to true when it
        // returns true. Here we want this because either:
        //
        // - we did not reach the max keys yet so the result is not
        //   truncated, and there is still room for the null key in
        //   the results
        //
        // - OR we reached it already while having to process a new
        //   key (so the result is truncated even without the null key)
        //
        // - OR we are *just* below the limit but the null key to add
        //   does not fit, so we know the result is now truncated
        //   because there remains the null key to be output.
        //
        if (this.nullKey) {
            this.handleKey(this.nullKey.key, this.nullKey.versionId, this.nullKey.value);
        }
        const result: ResultObject = {
        /* NextMarker is only provided when delimiter is used.
         * specified in v1 listing documentation
         * http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
         */
        return {
            CommonPrefixes: this.CommonPrefixes,
            Versions: this.Versions,
            Versions: this.Contents,
            IsTruncated: this.IsTruncated,
            NextKeyMarker: this.IsTruncated ? this.NextMarker : undefined,
            NextVersionIdMarker: this.IsTruncated
                ? this.NextVersionIdMarker
                : undefined,
            Delimiter: this.delimiter,
        };
        if (this.delimiter) {
            result.Delimiter = this.delimiter;
        }
        if (this.IsTruncated) {
            result.NextKeyMarker = this.nextKeyMarker;
            if (this.nextVersionIdMarker) {
                result.NextVersionIdMarker = this.nextVersionIdMarker;
            }
        }
        return result;
    }
}

module.exports = { DelimiterVersions };
@@ -1,12 +0,0 @@
module.exports = {
    Basic: require('./basic').List,
    Delimiter: require('./delimiter').Delimiter,
    DelimiterVersions: require('./delimiterVersions')
        .DelimiterVersions,
    DelimiterMaster: require('./delimiterMaster')
        .DelimiterMaster,
    MPU: require('./MPU').MultipartUploads,
    DelimiterCurrent: require('./delimiterCurrent').DelimiterCurrent,
    DelimiterNonCurrent: require('./delimiterNonCurrent').DelimiterNonCurrent,
    DelimiterOrphanDeleteMarker: require('./delimiterOrphanDeleteMarker').DelimiterOrphanDeleteMarker,
};
@@ -0,0 +1,5 @@
export { List as Basic } from './basic';
export { Delimiter } from './delimiter';
export { DelimiterVersions } from './delimiterVersions';
export { DelimiterMaster } from './delimiterMaster';
export { MultipartUploads as MPU } from './MPU';
@@ -1,21 +1,25 @@
const assert = require('assert');

const { FILTER_END, FILTER_SKIP, SKIP_NONE } = require('./tools');

import assert from 'assert';
import { FILTER_END, FILTER_SKIP, SKIP_NONE } from './tools';

const MAX_STREAK_LENGTH = 100;

/**
 * Handle the filtering and the skip mechanism of a listing result.
 */
class Skip {
export class Skip {
    extension;
    gteParams;
    listingEndCb;
    skipRangeCb;
    streakLength;

    /**
     * @param {Object} params - skip parameters
     * @param {Object} params.extension - delimiter extension used (required)
     * @param {String} params.gte - current range gte (greater than or
     * equal) used by the client code
     */
    constructor(params) {
    constructor(params: { extension: any; gte: string }) {
        assert(params.extension);

        this.extension = params.extension;
@@ -47,27 +51,22 @@ class Skip {
     * This function calls the listing end or the skip range callbacks if
     * needed.
     */
    filter(entry) {
    filter(entry): void {
        assert(this.listingEndCb);
        assert(this.skipRangeCb);

        const filteringResult = this.extension.filter(entry);
        const skipTo = this.extension.skipping();
        const skippingRange = this.extension.skipping();

        if (filteringResult === FILTER_END) {
            this.listingEndCb();
        } else if (filteringResult === FILTER_SKIP
            && skipTo !== SKIP_NONE) {
        } else if (
            filteringResult === FILTER_SKIP &&
            skippingRange !== SKIP_NONE
        ) {
            if (++this.streakLength >= MAX_STREAK_LENGTH) {
                let newRange;
                if (Array.isArray(skipTo)) {
                    newRange = [];
                    for (let i = 0; i < skipTo.length; ++i) {
                        newRange.push(skipTo[i]);
                    }
                } else {
                    newRange = skipTo;
                }
                const newRange = this._inc(skippingRange);

                /* Avoid looping on the same range again and again. */
                if (newRange === this.gteParams) {
                    this.streakLength = 1;
@@ -79,7 +78,14 @@ class Skip {
            this.streakLength = 0;
        }
    }

    _inc(str: string) {
        if (!str) {
            return str;
        }
        const lastCharValue = str.charCodeAt(str.length - 1);
        const lastCharNewValue = String.fromCharCode(lastCharValue + 1);


module.exports = Skip;
        return `${str.slice(0, str.length - 1)}${lastCharNewValue}`;
    }
}
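`_inc` is what lets the streak logic jump past a run of skipped keys: once the extension has returned FILTER_SKIP `MAX_STREAK_LENGTH` times in a row, the listing restarts one range past the last skipping bound instead of filtering entry by entry. A standalone sketch of the character bump, using the same logic as `_inc` above:

```ts
// Same character-bump idea as Skip._inc, shown standalone for clarity.
function inc(str: string): string {
    if (!str) {
        return str;
    }
    const last = str.charCodeAt(str.length - 1);
    // the new bound sorts immediately after every key prefixed by `str`
    return str.slice(0, -1) + String.fromCharCode(last + 1);
}

inc('foo/'); // => 'foo0' — '0' (0x30) is the code point right after '/' (0x2F)
```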
@@ -1,10 +1,11 @@
const { DbPrefixes } = require('../../versioning/constants').VersioningConstants;
import { VersioningConstants as VSConst } from '../../versioning/constants';
const { DbPrefixes } = VSConst;

// constants for extensions
const SKIP_NONE = undefined; // to be in line with the values of NextMarker
const FILTER_ACCEPT = 1;
const FILTER_SKIP = 0;
const FILTER_END = -1;
export const SKIP_NONE = undefined; // to be in line with the values of NextMarker
export const FILTER_ACCEPT = 1;
export const FILTER_SKIP = 0;
export const FILTER_END = -1;

/**
 * This function checks if a number is valid
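These codes form the contract between an extension's `filter()` and the driver: ACCEPT keeps iterating, SKIP signals that a range jump may pay off, END terminates the listing. A hedged sketch of a consumer branching on them (the real consumer is the `Skip` class shown earlier):

```ts
import { FILTER_ACCEPT, FILTER_SKIP, FILTER_END } from './tools';

// Illustrative only: how a listing driver could react to each code.
function interpret(code: number): 'continue' | 'consider-skip' | 'stop' {
    switch (code) {
        case FILTER_ACCEPT:
            return 'continue'; // entry was consumed into the result
        case FILTER_SKIP:
            return 'consider-skip'; // entry ignored; a range jump may apply
        case FILTER_END:
            return 'stop'; // maxKeys reached, listing is complete
        default:
            throw new Error(`unexpected filter code: ${code}`);
    }
}
```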
@@ -15,8 +16,8 @@ const FILTER_END = -1;
 * @param {Number} limit - The limit to respect
 * @return {Number} - The parsed number || limit
 */
function checkLimit(number, limit) {
    const parsed = Number.parseInt(number, 10);
export function checkLimit(number: number, limit: number): number {
    const parsed = Number.parseInt(number, 10);
    const valid = !Number.isNaN(parsed) && (!limit || parsed <= limit);
    return valid ? parsed : limit;
}
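In practice `checkLimit` clamps a client-supplied count (for example `max-keys`) to a server-side cap, falling back to the cap when the input does not parse. A quick usage sketch; the cap value is illustrative:

```ts
import { checkLimit } from './tools';

const MAX_KEYS_CAP = 1000; // illustrative server-side cap

checkLimit(500, MAX_KEYS_CAP);  // => 500  (valid and under the cap)
checkLimit(5000, MAX_KEYS_CAP); // => 1000 (over the cap, clamped)
checkLimit(NaN, MAX_KEYS_CAP);  // => 1000 (unparseable, falls back to the cap)
```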
@@ -28,7 +29,7 @@ function checkLimit(number, limit) {
 * @return {string} - the incremented string
 * or the input if it is not valid
 */
function inc(str) {
export function inc(str: string): string {
    return str ? (str.slice(0, str.length - 1) +
        String.fromCharCode(str.charCodeAt(str.length - 1) + 1)) : str;
}
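`inc` yields the smallest string that sorts after every key sharing the input as a prefix, which is exactly the `gte` bound needed to leap over a common prefix during listing:

```ts
import { inc } from './tools';

inc('photos/'); // => 'photos0' — every key under 'photos/' sorts below this
inc('');        // => ''       — falsy input is returned unchanged
```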
@@ -40,7 +41,7 @@ function inc(str) {
 * @param {object} v0params - listing parameters for v0 format
 * @return {object} - listing parameters for v1 format
 */
function listingParamsMasterKeysV0ToV1(v0params) {
export function listingParamsMasterKeysV0ToV1(v0params: any): any {
    const v1params = Object.assign({}, v0params);
    if (v0params.gt !== undefined) {
        v1params.gt = `${DbPrefixes.Master}${v0params.gt}`;

@@ -58,13 +59,3 @@ function listingParamsMasterKeysV0ToV1(v0params) {
    }
    return v1params;
}

module.exports = {
    checkLimit,
    inc,
    listingParamsMasterKeysV0ToV1,
    SKIP_NONE,
    FILTER_END,
    FILTER_SKIP,
    FILTER_ACCEPT,
};
@@ -1,4 +1,4 @@
function indexOf(arr, value) {
export function indexOf<T>(arr: T[], value: T) {
    if (!arr.length) {
        return -1;
    }

@@ -22,10 +22,10 @@ function indexOf(arr, value) {
    return -1;
}

function indexAtOrBelow(arr, value) {
    let i;
    let lo;
    let hi;
export function indexAtOrBelow<T>(arr: T[], value: T) {
    let i: number;
    let lo: number;
    let hi: number;

    if (!arr.length || arr[0] > value) {
        return -1;

@@ -52,7 +52,7 @@ function indexAtOrBelow(arr, value) {
/*
 * perform symmetric diff in O(m + n)
 */
function symDiff(k1, k2, v1, v2, cb) {
export function symDiff(k1, k2, v1, v2, cb) {
    let i = 0;
    let j = 0;
    const n = k1.length;

@@ -79,9 +79,3 @@ function symDiff(k1, k2, v1, v2, cb) {
        j++;
    }
}

module.exports = {
    indexOf,
    indexAtOrBelow,
    symDiff,
};
@@ -1,13 +1,12 @@
const ArrayUtils = require('./ArrayUtils');
import * as ArrayUtils from './ArrayUtils';

class SortedSet {
    constructor(obj) {
        if (obj) {
            this.keys = obj.keys;
            this.values = obj.values;
        } else {
            this.clear();
        }
export default class SortedSet<Key, Value> {
    keys: Key[];
    values: Value[];

    constructor(obj?: { keys: Key[]; values: Value[] }) {
        this.keys = obj?.keys ?? [];
        this.values = obj?.values ?? [];
    }

    clear() {

@@ -19,7 +18,7 @@ class SortedSet {
        return this.keys.length;
    }

    set(key, value) {
    set(key: Key, value: Value) {
        const index = ArrayUtils.indexAtOrBelow(this.keys, key);
        if (this.keys[index] === key) {
            this.values[index] = value;

@@ -29,17 +28,17 @@ class SortedSet {
        this.values.splice(index + 1, 0, value);
    }

    isSet(key) {
    isSet(key: Key) {
        const index = ArrayUtils.indexOf(this.keys, key);
        return index >= 0;
    }

    get(key) {
    get(key: Key) {
        const index = ArrayUtils.indexOf(this.keys, key);
        return index >= 0 ? this.values[index] : undefined;
    }

    del(key) {
    del(key: Key) {
        const index = ArrayUtils.indexOf(this.keys, key);
        if (index >= 0) {
            this.keys.splice(index, 1);

@@ -47,5 +46,3 @@ class SortedSet {
        }
    }
}

module.exports = SortedSet;
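Usage of the now-generic SortedSet: keys stay sorted and values move in lockstep (via binary search on insert), with `set` overwriting in place when the key already exists:

```ts
import SortedSet from './SortedSet';

const set = new SortedSet<string, number>();
set.set('b', 2);
set.set('a', 1);   // inserted before 'b'; keys remain sorted
set.set('a', 10);  // same key: value replaced, no duplicate entry
set.get('a');      // => 10
set.isSet('c');    // => false
set.del('b');
```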
@@ -1,7 +1,17 @@
const stream = require('stream');
import stream from 'stream';

class MergeStream extends stream.Readable {
    constructor(stream1, stream2, compare) {
export default class MergeStream extends stream.Readable {
    _compare: (a: any, b: any) => number;
    _streams: [stream.Readable, stream.Readable];
    _peekItems: [undefined | null, undefined | null];
    _streamEof: [boolean, boolean];
    _streamToResume: stream.Readable | null;

    constructor(
        stream1: stream.Readable,
        stream2: stream.Readable,
        compare: (a: any, b: any) => number
    ) {
        super({ objectMode: true });

        this._compare = compare;

@@ -16,13 +26,13 @@ class MergeStream extends stream.Readable {
        this._streamEof = [false, false];
        this._streamToResume = null;

        stream1.on('data', item => this._onItem(stream1, item, 0, 1));
        stream1.on('data', (item) => this._onItem(stream1, item, 0, 1));
        stream1.once('end', () => this._onEnd(stream1, 0, 1));
        stream1.once('error', err => this._onError(stream1, err, 0, 1));
        stream1.once('error', (err) => this._onError(stream1, err, 0, 1));

        stream2.on('data', item => this._onItem(stream2, item, 1, 0));
        stream2.on('data', (item) => this._onItem(stream2, item, 1, 0));
        stream2.once('end', () => this._onEnd(stream2, 1, 0));
        stream2.once('error', err => this._onError(stream2, err, 1, 0));
        stream2.once('error', (err) => this._onError(stream2, err, 1, 0));
    }

    _read() {

@@ -41,7 +51,7 @@ class MergeStream extends stream.Readable {
        callback();
    }

    _onItem(myStream, myItem, myIndex, otherIndex) {
    _onItem(myStream: stream.Readable, myItem, myIndex, otherIndex) {
        this._peekItems[myIndex] = myItem;
        const otherItem = this._peekItems[otherIndex];
        if (otherItem === undefined) {

@@ -69,7 +79,7 @@ class MergeStream extends stream.Readable {
        return undefined;
    }

    _onEnd(myStream, myIndex, otherIndex) {
    _onEnd(myStream: stream.Readable, myIndex, otherIndex) {
        this._streamEof[myIndex] = true;
        if (this._peekItems[myIndex] === undefined) {
            this._peekItems[myIndex] = null;

@@ -94,7 +104,7 @@ class MergeStream extends stream.Readable {
        return otherStream.resume();
    }

    _onError(myStream, err, myIndex, otherIndex) {
    _onError(myStream: stream.Readable, err, myIndex, otherIndex) {
        myStream.destroy();
        if (this._streams[otherIndex]) {
            this._streams[otherIndex].destroy();

@@ -102,5 +112,3 @@ class MergeStream extends stream.Readable {
        this.emit('error', err);
    }
}

module.exports = MergeStream;
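MergeStream zips two individually-sorted object streams into one sorted stream, peeking one item from each side and emitting the smaller. A usage sketch, assuming the usual `(a, b) => negative/zero/positive` comparator contract:

```ts
import stream from 'stream';
import MergeStream from './MergeStream';

// Two pre-sorted object-mode sources (Readable.from is standard Node).
const left = stream.Readable.from([{ key: 'a' }, { key: 'c' }]);
const right = stream.Readable.from([{ key: 'b' }, { key: 'd' }]);

const merged = new MergeStream(left, right, (a, b) => a.key.localeCompare(b.key));
merged.on('data', (item) => console.log(item.key)); // a, b, c, d
merged.on('end', () => console.log('done'));
```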
@@ -54,8 +54,7 @@ export default class AuthInfo {
        return this.canonicalID === constants.publicId;
    }
    isRequesterAServiceAccount() {
        return this.canonicalID.startsWith(
            `${constants.zenkoServiceAccount}/`);
        return this.canonicalID.startsWith(`${constants.zenkoServiceAccount}/`);
    }
    isRequesterThisServiceAccount(serviceName: string) {
        const computedCanonicalID = `${constants.zenkoServiceAccount}/${serviceName}`;
@@ -14,7 +14,7 @@ function vaultSignatureCb(
    err: Error | null,
    authInfo: { message: { body: any } },
    log: Logger,
    callback: (err: Error | null, data?: any, results?: any, params?: any, infos?: any) => void,
    callback: (err: Error | null, data?: any, results?: any, params?: any) => void,
    streamingV4Params?: any
) {
    // vaultclient API guarantees that it returns:

@@ -38,30 +38,9 @@ function vaultSignatureCb(
    }
    // @ts-ignore
    log.addDefaultFields(auditLog);
    return callback(null, userInfo, authorizationResults, streamingV4Params, {
        accountQuota: info.accountQuota || {},
    });
    return callback(null, userInfo, authorizationResults, streamingV4Params);
}

export type AuthV4RequestParams = {
    version: 4;
    log: Logger;
    data: {
        accessKey: string;
        signatureFromRequest: string;
        region: string;
        stringToSign: string;
        scopeDate: string;
        authType: 'query' | 'header';
        signatureVersion: string;
        signatureAge?: number;
        timestamp: number;
        credentialScope: string;
        securityToken: string;
        algo: string;
        log: Logger;
    };
};

/**
 * Class that provides common authentication methods against different
@@ -170,8 +149,26 @@ export default class Vault {
     * @param callback - callback with either error or user info
     */
    authenticateV4Request(
        params: AuthV4RequestParams,
        requestContexts: any[] | null,
        params: {
            version: 4;
            log: Logger;
            data: {
                accessKey: string;
                signatureFromRequest: string;
                region: string;
                stringToSign: string;
                scopeDate: string;
                authType: 'query' | 'header';
                signatureVersion: string;
                signatureAge?: number;
                timestamp: number;
                credentialScope: string;
                securityToken: string;
                algo: string;
                log: Logger;
            };
        },
        requestContexts: any[],
        callback: (err: Error | null, data?: any) => void
    ) {
        params.log.debug('authenticating V4 request');

@@ -302,7 +299,7 @@ export default class Vault {
        log.trace('getting accountIds from Vault based on canonicalIDs',
            { canonicalIDs });
        this.client.getAccountIds(canonicalIDs,
            // @ts-expect-error
            // @ts-ignore
            { reqUid: log.getSerializedUids() },
            (err: Error | null, info?: any) => {
                if (err) {

@@ -386,19 +383,4 @@ export default class Vault {
            return callback(null, respBody);
        });
    }

    report(log: Logger, callback: (err: Error | null, data?: any) => void) {
        // call the report function of the client
        if (!this.client.report) {
            return callback(null, {});
        }
        // @ts-ignore
        return this.client.report(log.getSerializedUids(), (err: Error | null, obj?: any) => {
            if (err) {
                log.debug(`error from ${this.implName}`, { error: err });
                return callback(err);
            }
            return callback(null, obj);
        });
    }
}
lib/auth/auth.ts
@@ -11,11 +11,11 @@ import constructStringToSignV4 from './v4/constructStringToSign';
import { convertUTCtoISO8601 } from './v4/timeUtils';
import * as vaultUtilities from './backends/in_memory/vaultUtilities';
import * as inMemoryBackend from './backends/in_memory/Backend';
import baseBackend from './backends/base';
import chainBackend from './backends/ChainBackend';
import validateAuthConfig from './backends/in_memory/validateAuthConfig';
import AuthLoader from './backends/in_memory/AuthLoader';
import Vault from './Vault';
import baseBackend from './backends/BaseBackend';
import chainBackend from './backends/ChainBackend';

let vault: Vault | null = null;
const auth = {};

@@ -77,8 +77,9 @@ function extractParams(
    } else if (authHeader.startsWith('AWS4')) {
        version = 'v4';
    } else {
        log.trace('invalid authorization security header',
            { header: authHeader });
        log.trace('invalid authorization security header', {
            header: authHeader,
        });
        return { err: errors.AccessDenied };
    }
} else if (data.Signature) {

@@ -92,8 +93,10 @@ function extractParams(
    // Here, either both values are set, or none is set
    if (version !== null && method !== null) {
        if (!checkFunctions[version] || !checkFunctions[version][method]) {
            log.trace('invalid auth version or method',
                { version, authMethod: method });
            log.trace('invalid auth version or method', {
                version,
                authMethod: method,
            });
            return { err: errors.NotImplemented };
        }
        log.trace('identified auth method', { version, authMethod: method });
@@ -163,20 +166,6 @@ function doAuth(
    return cb(errors.InternalError);
}

/**
 * This function will generate a version 4 content-md5 header
 * It looks at the request path to determine what kind of header encoding is required
 *
 * @param path - the request path
 * @param payload - the request payload to hash
 */
function generateContentMD5Header(
    path: string,
    payload: string,
) {
    const encoding = path && path.startsWith('/_/backbeat/') ? 'hex' : 'base64';
    return crypto.createHash('md5').update(payload, 'binary').digest(encoding);
}
/**
 * This function will generate a version 4 header
 *
@@ -189,7 +178,6 @@ function generateContentMD5Header(
 * @param [proxyPath] - path that gets proxied by reverse proxy
 * @param [sessionToken] - security token if the access/secret keys
 * are temporary credentials from STS
 * @param [payload] - body of the request if any
 */
function generateV4Headers(
    request: any,

@@ -197,9 +185,8 @@ function generateV4Headers(
    accessKey: string,
    secretKeyValue: string,
    awsService: string,
    proxyPath?: string,
    sessionToken?: string,
    payload?: string,
    proxyPath: string,
    sessionToken: string
) {
    Object.assign(request, { headers: {} });
    const amzDate = convertUTCtoISO8601(Date.now());

@@ -207,23 +194,23 @@ function generateV4Headers(
    const scopeDate = amzDate.slice(0, amzDate.indexOf('T'));
    const region = 'us-east-1';
    const service = awsService || 'iam';
    const credentialScope =
        `${scopeDate}/${region}/${service}/aws4_request`;
    const credentialScope = `${scopeDate}/${region}/${service}/aws4_request`;
    const timestamp = amzDate;
    const algorithm = 'AWS4-HMAC-SHA256';

    payload = payload || '';
    let payload = '';
    if (request.method === 'POST') {
        payload = queryString.stringify(data, undefined, undefined, {
            encodeURIComponent,
        });
    }
    const payloadChecksum = crypto.createHash('sha256')
        .update(payload, 'binary').digest('hex');
    const payloadChecksum = crypto
        .createHash('sha256')
        .update(payload, 'binary')
        .digest('hex');
    request.setHeader('host', request._headers.host);
    request.setHeader('x-amz-date', amzDate);
    request.setHeader('x-amz-content-sha256', payloadChecksum);
    request.setHeader('content-md5', generateContentMD5Header(request.path, payload));

    if (sessionToken) {
        request.setHeader('x-amz-security-token', sessionToken);
@@ -231,35 +218,49 @@ function generateV4Headers(

    Object.assign(request.headers, request._headers);
    const signedHeaders = Object.keys(request._headers)
        .filter(headerName =>
            headerName.startsWith('x-amz-')
            || headerName.startsWith('x-scal-')
            || headerName === 'content-md5'
            || headerName === 'host',
        ).sort().join(';');
    const params = { request, signedHeaders, payloadChecksum,
        credentialScope, timestamp, query: data,
        awsService: service, proxyPath };
        .filter(
            (headerName) =>
                headerName.startsWith('x-amz-') ||
                headerName.startsWith('x-scal-') ||
                headerName === 'host'
        )
        .sort()
        .join(';');
    const params = {
        request,
        signedHeaders,
        payloadChecksum,
        credentialScope,
        timestamp,
        query: data,
        awsService: service,
        proxyPath,
    };
    const stringToSign = constructStringToSignV4(params);
    const signingKey = vaultUtilities.calculateSigningKey(secretKeyValue,
    const signingKey = vaultUtilities.calculateSigningKey(
        secretKeyValue,
        region,
        scopeDate,
        service);
    const signature = crypto.createHmac('sha256', signingKey)
        .update(stringToSign as string, 'binary').digest('hex');
    const authorizationHeader = `${algorithm} Credential=${accessKey}` +
        service
    );
    const signature = crypto
        .createHmac('sha256', signingKey)
        .update(stringToSign, 'binary')
        .digest('hex');
    const authorizationHeader =
        `${algorithm} Credential=${accessKey}` +
        `/${credentialScope}, SignedHeaders=${signedHeaders}, ` +
        `Signature=${signature}`;
    request.setHeader('authorization', authorizationHeader);
    Object.assign(request, { headers: {} });
}

export const server = { extractParams, doAuth }
export const client = { generateV4Headers, constructStringToSignV2 }
export const inMemory = { backend: inMemoryBackend, validateAuthConfig, AuthLoader }
export const backends = { baseBackend, chainBackend }
export {
    setAuthHandler as setHandler,
    AuthInfo,
    Vault
}
export const server = { extractParams, doAuth };
export const client = { generateV4Headers, constructStringToSignV2 };
export const inMemory = {
    backend: inMemoryBackend,
    validateAuthConfig,
    AuthLoader,
};
export const backends = { baseBackend, chainBackend };
export { setAuthHandler as setHandler, AuthInfo, Vault };
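Boiled down, the tail of `generateV4Headers` is the standard SigV4 finish: derive a signing key scoped to date/region/service, HMAC the string-to-sign, then assemble the Authorization header. A condensed hedged sketch using the same helpers as the diff (all input values are illustrative):

```ts
import * as crypto from 'crypto';
import * as vaultUtilities from './backends/in_memory/vaultUtilities';

// Condensed sketch of the final SigV4 steps; parameter values are illustrative.
function buildAuthorizationHeader(
    stringToSign: string,
    secretKey: string,
    accessKey: string,
    credentialScope: string,
    signedHeaders: string,
    region = 'us-east-1',
    scopeDate = '20240101',
    service = 'iam'
): string {
    // key derivation scoped to date/region/service
    const signingKey = vaultUtilities.calculateSigningKey(secretKey, region, scopeDate, service);
    const signature = crypto
        .createHmac('sha256', signingKey)
        .update(stringToSign, 'binary')
        .digest('hex');
    return `AWS4-HMAC-SHA256 Credential=${accessKey}/${credentialScope}, ` +
        `SignedHeaders=${signedHeaders}, Signature=${signature}`;
}
```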
@@ -0,0 +1,70 @@
import errors from '../../errors';
import { Callback } from './in_memory/types';

/** Base backend class */
export default class BaseBackend {
    service: string;

    constructor(service: string) {
        this.service = service;
    }

    verifySignatureV2(
        _stringToSign: string,
        _signatureFromRequest: string,
        _accessKey: string,
        _options: { algo: 'SHA1' | 'SHA256' },
        callback: Callback
    ) {
        return callback(errors.AuthMethodNotImplemented);
    }

    verifySignatureV4(
        _stringToSign: string,
        _signatureFromRequest: string,
        _accessKey: string,
        _region: string,
        _scopeDate: string,
        _options: any,
        callback: Callback
    ) {
        return callback(errors.AuthMethodNotImplemented);
    }

    /**
     * Gets canonical ID's for a list of accounts based on email associated
     * with account. The callback will be called with either error or object
     * with email addresses as keys and canonical IDs as values.
     */
    getCanonicalIds(_emails: string[], _options: any, callback: Callback) {
        return callback(errors.AuthMethodNotImplemented);
    }

    /**
     * Gets email addresses (referred to as display names for getACL's) for a
     * list of accounts based on canonical IDs associated with account.
     * The callback will be called with either error or an object from Vault
     * containing account canonicalID as each object key and an email address
     * as the value (or "NotFound").
     */
    getEmailAddresses(
        _canonicalIDs: string[],
        _options: any,
        callback: Callback
    ) {
        return callback(errors.AuthMethodNotImplemented);
    }

    checkPolicies(
        _requestContextParams: any,
        _userArn: string,
        _options: any,
        callback: Callback
    ) {
        return callback(null, { message: { body: [] } });
    }

    healthcheck(_reqUid: string, callback: Callback) {
        return callback(null, { code: 200, message: 'OK' });
    }
}
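The class above is the template the chain and in-memory backends fill in. A hypothetical subclass only has to override the methods it supports; every other method inherits the `AuthMethodNotImplemented` fallback (names below are illustrative, not part of this diff):

```ts
import BaseBackend from './BaseBackend';
import { Callback } from './in_memory/types';

// Hypothetical backend for illustration only.
class StaticBackend extends BaseBackend {
    constructor() {
        super('static'); // illustrative service id
    }

    getCanonicalIds(emails: string[], _options: any, callback: Callback) {
        // toy mapping: every email resolves to the same canonical ID
        const body = Object.fromEntries(emails.map((e) => [e, 'canonical-id-0']));
        return callback(null, { message: { body } });
    }
}
```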
@@ -1,24 +1,31 @@
import assert from 'assert';
import async from 'async';

import { Callback } from './in_memory/types';
import errors from '../../errors';
import BaseBackend from './base';
import BaseBackend from './BaseBackend';

export type Policy = {
    [key: string]: any;
    arn?: string;
    versionId?: string;
    isAllowed: boolean;
}

/**
 * Class that provides an authentication backend that will verify signatures
 * and retrieve emails and canonical ids associated with an account using a
 * given list of authentication backends and vault clients.
 *
 * @class ChainBackend
 */
export default class ChainBackend extends BaseBackend {
    _clients: any[];
    #clients: BaseBackend[];

    /**
     * @constructor
     * @param {string} service - service id
     * @param {object[]} clients - list of authentication backends or vault clients
     */
    constructor(service: string, clients: any[]) {
    constructor(service: string, clients: BaseBackend[]) {
        super(service);

        assert(Array.isArray(clients) && clients.length > 0, 'invalid client list');
@@ -28,25 +35,20 @@ export default class ChainBackend extends BaseBackend {
            typeof client.getCanonicalIds === 'function' &&
            typeof client.getEmailAddresses === 'function' &&
            typeof client.checkPolicies === 'function' &&
            typeof client.healthcheck === 'function',
            typeof client.healthcheck === 'function'
        ), 'invalid client: missing required auth backend methods');
        this._clients = clients;
        this.#clients = clients;
    }

    /*
     * try task against each client for one to be successful
     */
    _tryEachClient(task: any, cb: any) {
    /** try task against each client for one to be successful */
    #tryEachClient(task: (client: BaseBackend, done?: any) => void, cb: Callback) {
        // @ts-ignore
        async.tryEach(this._clients.map(client => done => task(client, done)), cb);
        async.tryEach(this.#clients.map(client => (done: any) => task(client, done)), cb);
    }

    /*
     * apply task to all clients
     */
    _forEachClient(task: any, cb: any) {
        async.map(this._clients, task, cb);
    /** apply task to all clients */
    #forEachClient(task: (client: BaseBackend, done?: any) => void, cb: Callback) {
        async.map(this.#clients, task, cb);
    }

    verifySignatureV2(
@@ -54,14 +56,14 @@ export default class ChainBackend extends BaseBackend {
        signatureFromRequest: string,
        accessKey: string,
        options: any,
        callback: any,
        callback: Callback
    ) {
        this._tryEachClient((client, done) => client.verifySignatureV2(
        this.#tryEachClient((client, done) => client.verifySignatureV2(
            stringToSign,
            signatureFromRequest,
            accessKey,
            options,
            done,
            done
        ), callback);
    }

@@ -72,27 +74,32 @@ export default class ChainBackend extends BaseBackend {
        region: string,
        scopeDate: string,
        options: any,
        callback: any,
        callback: Callback
    ) {
        this._tryEachClient((client, done) => client.verifySignatureV4(
        this.#tryEachClient((client, done) => client.verifySignatureV4(
            stringToSign,
            signatureFromRequest,
            accessKey,
            region,
            scopeDate,
            options,
            done,
            done
        ), callback);
    }

    static _mergeObjects(objectResponses: any) {
    static _mergeObjects(objectResponses: any[]) {
        return objectResponses.reduce(
            (retObj, resObj) => Object.assign(retObj, resObj.message.body),
            {});
            {}
        );
    }

    getCanonicalIds(emailAddresses: string[], options: any, callback: any) {
        this._forEachClient(
    getCanonicalIds(
        emailAddresses: string[],
        options: any,
        callback: Callback<{ message: { body: any } }>
    ) {
        this.#forEachClient(
            (client, done) => client.getCanonicalIds(emailAddresses, options, done),
            (err, res) => {
                if (err) {
@@ -104,11 +111,16 @@ export default class ChainBackend extends BaseBackend {
                        body: ChainBackend._mergeObjects(res),
                    },
                });
            });
    }
            }
        );
    }

    getEmailAddresses(canonicalIDs: string[], options: any, callback: any) {
        this._forEachClient(
    getEmailAddresses(
        canonicalIDs: string[],
        options: any,
        callback: Callback<{ message: { body: any } }>
    ) {
        this.#forEachClient(
            (client, done) => client.getEmailAddresses(canonicalIDs, options, done),
            (err, res) => {
                if (err) {
@@ -119,56 +131,41 @@ export default class ChainBackend extends BaseBackend {
                        body: ChainBackend._mergeObjects(res),
                    },
                });
            });
    }
            }
        );
    }

    /*
     * merge policy responses into a single message
     */
    static _mergePolicies(policyResponses: any) {
        const policyMap: any = {};
    /** merge policy responses into a single message */
    static _mergePolicies(policyResponses: { message: { body: any[] } }[]) {
        const policyMap: { [key: string]: Policy } = {};

        policyResponses.forEach(resp => {
            if (!resp.message || !Array.isArray(resp.message.body)) {
                return;
            }

            const check = (policy) => {
                const key = (policy.arn || '') + (policy.versionId || '') + (policy.action || '');
            resp.message.body.forEach(policy => {
                const key = (policy.arn || '') + (policy.versionId || '');
                if (!policyMap[key] || !policyMap[key].isAllowed) {
                    policyMap[key] = policy;
                }
                // else is duplicate policy
            };

            resp.message.body.forEach(policy => {
                if (Array.isArray(policy)) {
                    policy.forEach(authResult => check(authResult));
                } else {
                    check(policy);
                }
            });
        });

        return Object.keys(policyMap).map(key => {
            const policyRes: any = { isAllowed: policyMap[key].isAllowed };
            if (policyMap[key].arn !== '') {
        return Object.keys(policyMap).map((key) => {
            const policyRes: Policy = { isAllowed: policyMap[key].isAllowed };
            if (policyMap[key].arn && policyMap[key].arn !== '') {
                policyRes.arn = policyMap[key].arn;
            }
            if (policyMap[key].versionId) {
                policyRes.versionId = policyMap[key].versionId;
            }
            if (policyMap[key].isImplicit !== undefined) {
                policyRes.isImplicit = policyMap[key].isImplicit;
            }
            if (policyMap[key].action) {
                policyRes.action = policyMap[key].action;
            }
            return policyRes;
        });
    }

    /*
    /**
        response format:
        { message: {
            body: [{}],
@@ -176,12 +173,17 @@ export default class ChainBackend extends BaseBackend {
            message: string,
        } }
    */
    checkPolicies(requestContextParams: any, userArn: string, options: any, callback: any) {
        this._forEachClient((client, done) => client.checkPolicies(
    checkPolicies(
        requestContextParams: any,
        userArn: string,
        options: any,
        callback: Callback<{ message: { body: any } }>
    ) {
        this.#forEachClient((client, done) => client.checkPolicies(
            requestContextParams,
            userArn,
            options,
            done,
            done
        ), (err, res) => {
            if (err) {
                return callback(err);

@@ -194,40 +196,22 @@ export default class ChainBackend extends BaseBackend {
        });
    }

    healthcheck(reqUid: string, callback: any) {
        this._forEachClient((client, done) =>
    healthcheck(reqUid: string, callback: Callback) {
        this.#forEachClient((client, done) =>
            client.healthcheck(reqUid, (err, res) => done(null, {
                error: !!err ? err : null,
                status: res,
            }),
            })
        ), (err, res) => {
            if (err) {
                return callback(err);
            }

            const isError = res.some(results => !!results.error);
            const isError = res.some((results: any) => !!results.error);
            if (isError) {
                return callback(errors.InternalError, res);
            }
            return callback(null, res);
        });
    }

    report(reqUid: string, callback: any) {
        this._forEachClient((client, done) =>
            client.report(reqUid, done),
        (err, res) => {
            if (err) {
                return callback(err);
            }
            const mergedRes = res.reduce((acc, val) => {
                Object.keys(val).forEach(k => {
                    acc[k] = val[k];
                });
                return acc;
            }, {});

            return callback(null, mergedRes);
        });
    }
}
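A small worked example of the `_mergePolicies` dedup rule above: responses are keyed by `(arn || '') + (versionId || '')`, and a later entry only replaces an earlier one while the stored entry is a deny, so an explicit allow from any backend wins:

```ts
// Hedged sketch reproducing the dedup rule from _mergePolicies above.
type Policy = { arn?: string; versionId?: string; isAllowed: boolean };

const responses: Policy[][] = [
    [{ arn: 'arn:aws:s3:::bucket', isAllowed: false }], // backend 1: deny
    [{ arn: 'arn:aws:s3:::bucket', isAllowed: true }],  // backend 2: allow
];

const policyMap: { [key: string]: Policy } = {};
responses.flat().forEach((policy) => {
    const key = (policy.arn || '') + (policy.versionId || '');
    if (!policyMap[key] || !policyMap[key].isAllowed) {
        policyMap[key] = policy; // replace only while the stored entry denies
    }
});

Object.values(policyMap); // => [{ arn: 'arn:aws:s3:::bucket', isAllowed: true }]
```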
@@ -1,96 +0,0 @@
import errors from '../../errors';

/**
 * Base backend class
 *
 * @class BaseBackend
 */
export default class BaseBackend {
    service: string;

    /**
     * @constructor
     * @param {string} service - service identifier for constructing arn
     */
    constructor(service: string) {
        this.service = service;
    }

    /** verifySignatureV2
     * @param stringToSign - string to sign built per AWS rules
     * @param signatureFromRequest - signature sent with request
     * @param accessKey - account accessKey
     * @param options - contains algorithm (SHA1 or SHA256)
     * @param callback - callback with either error or user info
     * @return calls callback
     */
    verifySignatureV2(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        options: any,
        callback: any
    ) {
        return callback(errors.AuthMethodNotImplemented);
    }

    /** verifySignatureV4
     * @param stringToSign - string to sign built per AWS rules
     * @param signatureFromRequest - signature sent with request
     * @param accessKey - account accessKey
     * @param region - region specified in request credential
     * @param scopeDate - date specified in request credential
     * @param options - options to send to Vault
     * (just contains reqUid for logging in Vault)
     * @param callback - callback with either error or user info
     * @return calls callback
     */
    verifySignatureV4(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        region: string,
        scopeDate: string,
        options: any,
        callback: any
    ) {
        return callback(errors.AuthMethodNotImplemented);
    }

    /**
     * Gets canonical ID's for a list of accounts
     * based on email associated with account
     * @param emails - list of email addresses
     * @param options - to send log id to vault
     * @param callback - callback to calling function
     * @returns callback with either error or
     * object with email addresses as keys and canonical IDs
     * as values
     */
    getCanonicalIds(emails: string[], options: any, callback: any) {
        return callback(errors.AuthMethodNotImplemented);
    }

    /**
     * Gets email addresses (referred to as display names for getACL's)
     * for a list of accounts based on canonical IDs associated with account
     * @param canonicalIDs - list of canonicalIDs
     * @param options - to send log id to vault
     * @param callback - callback to calling function
     * @returns callback with either error or
     * an object from Vault containing account canonicalID
     * as each object key and an email address as the value (or "NotFound")
     */
    getEmailAddresses(canonicalIDs: string[], options: any, callback: any) {
        return callback(errors.AuthMethodNotImplemented);
    }

    checkPolicies(requestContextParams: any, userArn: string, options: any, callback: any) {
        return callback(null, { message: { body: [] } });
    }

    healthcheck(reqUid: string, callback: any) {
        return callback(null, { code: 200, message: 'OK' });
    }
}
@@ -4,6 +4,7 @@ import joi from 'joi';
import werelogs from 'werelogs';
import * as types from './types';
import { Account, Accounts } from './types';

import ARN from '../../../models/ARN';

/** Load authentication information from files or pre-loaded account objects */

@@ -49,9 +50,9 @@ export default class AuthLoader {
    addFile(filePath: string, options?: { legacy: true }): void;
    addFile(filePath: string, options = { legacy: true }) {
        // On deprecation, remove the legacy part and keep the promises.
        const readFunc: any = options.legacy ? fs.readFileSync : fs.promises.readFile;
        const readResult = readFunc(filePath, 'utf8') as Promise<string> | string;
        const prom = Promise.resolve(readResult).then((data) => {
        const fn: any = options.legacy ? fs.readFileSync : fs.promises.readFile;
        const temp = fn(filePath, 'utf8') as Promise<string> | string;
        const prom = Promise.resolve(temp).then((data) => {
            const authData = JSON.parse(data);
            this.addAccounts(authData, filePath);
        });
@@ -1,9 +1,8 @@
import crypto from 'crypto';
import { Logger } from 'werelogs';
import * as crypto from 'crypto';
import errors from '../../../errors';
import { calculateSigningKey, hashSignature } from './vaultUtilities';
import Indexer from './Indexer';
import BaseBackend from '../base';
import BaseBackend from '../BaseBackend';
import { Accounts } from './types';

function _formatResponse(userInfoToSend: any) {

@@ -17,40 +16,41 @@ function _formatResponse(userInfoToSend: any) {
/**
 * Class that provides a memory backend for verifying signatures and getting
 * emails and canonical ids associated with an account.
 *
 * @class InMemoryBackend
 */
class InMemoryBackend extends BaseBackend {
    indexer: Indexer;
    formatResponse: any;

    /**
     * @constructor
     * @param service - service identifier for constructing arn
     * @param indexer - indexer instance for retrieving account info
     * @param formatter - function which accepts user info to send
     * back and returns it in an object
     */
    constructor(service: string, indexer: Indexer, formatter: typeof _formatResponse) {
    constructor(service: string, indexer: Indexer) {
        super(service);
        this.indexer = indexer;
        this.formatResponse = formatter;
    }

    // CODEQUALITY-TODO-SYNC Should be synchronous
    verifySignatureV2(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        options: any,
        callback: any,
        options: { algo: 'SHA256' | 'SHA1' },
        callback: (
            error: Error | null,
            data?: ReturnType<typeof _formatResponse>
        ) => void
    ) {
        const entity = this.indexer.getEntityByKey(accessKey);
        if (!entity) {
            return callback(errors.InvalidAccessKeyId);
        }
        const secretKey = this.indexer.getSecretKey(entity, accessKey);
        const reconstructedSig =
            hashSignature(stringToSign, secretKey, options.algo);
        const reconstructedSig = hashSignature(
            stringToSign,
            secretKey,
            options.algo
        );
        if (signatureFromRequest !== reconstructedSig) {
            return callback(errors.SignatureDoesNotMatch);
        }
@@ -58,21 +58,26 @@ class InMemoryBackend extends BaseBackend {
            accountDisplayName: this.indexer.getAcctDisplayName(entity),
            canonicalID: entity.canonicalID,
            arn: entity.arn,
            // @ts-ignore
            // @ts-ignore TODO why ?
            IAMdisplayName: entity.IAMdisplayName,
        };
        const vaultReturnObject = this.formatResponse(userInfoToSend);
        const vaultReturnObject = _formatResponse(userInfoToSend);
        return callback(null, vaultReturnObject);
    }

    // TODO Options not used. Why ?
    // CODEQUALITY-TODO-SYNC Should be synchronous
    verifySignatureV4(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        region: string,
        scopeDate: string,
        options: any,
        callback: any,
        _options: { algo: 'SHA256' | 'SHA1' },
        callback: (
            err: Error | null,
            data?: ReturnType<typeof _formatResponse>
        ) => void
    ) {
        const entity = this.indexer.getEntityByKey(accessKey);
        if (!entity) {

@@ -80,8 +85,10 @@ class InMemoryBackend extends BaseBackend {
        }
        const secretKey = this.indexer.getSecretKey(entity, accessKey);
        const signingKey = calculateSigningKey(secretKey, region, scopeDate);
        const reconstructedSig = crypto.createHmac('sha256', signingKey)
            .update(stringToSign, 'binary').digest('hex');
        const reconstructedSig = crypto
            .createHmac('sha256', signingKey)
            .update(stringToSign, 'binary')
            .digest('hex');
        if (signatureFromRequest !== reconstructedSig) {
            return callback(errors.SignatureDoesNotMatch);
        }
@@ -89,23 +96,28 @@ class InMemoryBackend extends BaseBackend {
            accountDisplayName: this.indexer.getAcctDisplayName(entity),
            canonicalID: entity.canonicalID,
            arn: entity.arn,
            // @ts-ignore
            // @ts-ignore TODO why ?
            IAMdisplayName: entity.IAMdisplayName,
        };
        const vaultReturnObject = this.formatResponse(userInfoToSend);
        const vaultReturnObject = _formatResponse(userInfoToSend);
        return callback(null, vaultReturnObject);
    }

    getCanonicalIds(emails: string[], log: Logger, cb: any) {
    // TODO log not used. Why ?
    // CODEQUALITY-TODO-SYNC Should be synchronous
    getCanonicalIds(
        emails: string[],
        _log: any,
        cb: (err: null, data: { message: { body: any } }) => void
    ) {
        const results = {};
        emails.forEach(email => {
        emails.forEach((email) => {
            const lowercasedEmail = email.toLowerCase();
            const entity = this.indexer.getEntityByEmail(lowercasedEmail);
            if (!entity) {
                results[email] = 'NotFound';
            } else {
                results[email] =
                    entity.canonicalID;
                results[email] = entity.canonicalID;
            }
        });
        const vaultReturnObject = {

@@ -116,9 +128,15 @@ class InMemoryBackend extends BaseBackend {
        return cb(null, vaultReturnObject);
    }

    getEmailAddresses(canonicalIDs: string[], options: any, cb: any) {
    // TODO options not used. Why ?
    // CODEQUALITY-TODO-SYNC Should be synchronous
    getEmailAddresses(
        canonicalIDs: string[],
        _options: any,
        cb: (err: null, data: { message: { body: any } }) => void
    ) {
        const results = {};
        canonicalIDs.forEach(canonicalId => {
        canonicalIDs.forEach((canonicalId) => {
            const foundEntity = this.indexer.getEntityByCanId(canonicalId);
            if (!foundEntity || !foundEntity.email) {
                results[canonicalId] = 'NotFound';
@@ -134,19 +152,26 @@ class InMemoryBackend extends BaseBackend {
        return cb(null, vaultReturnObject);
    }

    // TODO options not used. Why ?
    // CODEQUALITY-TODO-SYNC Should be synchronous
    /**
     * Gets accountIds for a list of accounts based on
     * the canonical IDs associated with the account
     * @param canonicalIDs - list of canonicalIDs
     * @param options - to send log id to vault
     * @param cb - callback to calling function
     * @returns callback with either error or
     * @return The next is wrong. Here to keep archives.
     * callback with either error or
     * an object from Vault containing account canonicalID
     * as each object key and an accountId as the value (or "NotFound")
     */
    getAccountIds(canonicalIDs: string[], options: any, cb: any) {
    getAccountIds(
        canonicalIDs: string[],
        _options: any,
        cb: (err: null, data: { message: { body: any } }) => void
    ) {
        const results = {};
        canonicalIDs.forEach(canonicalID => {
        canonicalIDs.forEach((canonicalID) => {
            const foundEntity = this.indexer.getEntityByCanId(canonicalID);
            if (!foundEntity || !foundEntity.shortid) {
                results[canonicalID] = 'Not Found';

@@ -161,34 +186,16 @@ class InMemoryBackend extends BaseBackend {
        };
        return cb(null, vaultReturnObject);
    }

    report(log: Logger, callback: any) {
        return callback(null, {});
    }
}


class S3AuthBackend extends InMemoryBackend {
    /**
     * @constructor
     * @param authdata - the authentication config file's data
     * @param authdata.accounts - array of account objects
     * @param authdata.accounts[].name - account name
     * @param authdata.accounts[].email - account email
     * @param authdata.accounts[].arn - IAM resource name
     * @param authdata.accounts[].canonicalID - account canonical ID
     * @param authdata.accounts[].shortid - short account ID
     * @param authdata.accounts[].keys - array of key objects
     * @param authdata.accounts[].keys[].access - access key
     * @param authdata.accounts[].keys[].secret - secret key
     */
    constructor(authdata?: Accounts) {
        super('s3', new Indexer(authdata), _formatResponse);
    constructor(authdata: Accounts) {
        super('s3', new Indexer(authdata));
    }

    refreshAuthData(authData?: Accounts) {
    refreshAuthData(authData: Accounts) {
        this.indexer = new Indexer(authData);
    }
}

export { S3AuthBackend as s3 }
export { S3AuthBackend as s3 };
@@ -1,14 +1,14 @@
import joi from 'joi';

export type Callback<Data = any> = (err?: Error | null | undefined, data?: Data) => void;
export type Callback<Data = any> = (err: Error | null | undefined, data?: Data) => void;

export type Credentials = { access: string; secret: string };
export type Key = { access: string; secret: string };
export type Base = {
    arn: string;
    canonicalID: string;
    shortid: string;
    email: string;
    keys: Credentials[];
    keys: Key[];
};
export type Account = Base & { name: string; users: any[] };
export type Accounts = { accounts: Account[] };
@@ -15,7 +15,7 @@ export function hashSignature(
    return hmacObject.update(stringToSign, 'binary').digest('base64');
}

const sha256Digest = (key: string | Buffer, data: string) => {
const sha256 = (key: string | Buffer, data: string) => {
    return crypto.createHmac('sha256', key).update(data, 'binary').digest();
};

@@ -32,9 +32,9 @@ export function calculateSigningKey(
    scopeDate: string,
    service?: string
): Buffer {
    const dateKey = sha256Digest(`AWS4${secretKey}`, scopeDate);
    const dateRegionKey = sha256Digest(dateKey, region);
    const dateRegionServiceKey = sha256Digest(dateRegionKey, service || 's3');
    const signingKey = sha256Digest(dateRegionServiceKey, 'aws4_request');
    const dateKey = sha256(`AWS4${secretKey}`, scopeDate);
    const dateRegionKey = sha256(dateKey, region);
    const dateRegionServiceKey = sha256(dateRegionKey, service || 's3');
    const signingKey = sha256(dateRegionServiceKey, 'aws4_request');
    return signingKey;
}
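The four chained HMACs above are the standard AWS SigV4 key derivation (date → region → service → the literal `aws4_request`). A usage sketch with a truncated illustrative secret:

```ts
import * as crypto from 'crypto';
import { calculateSigningKey, hashSignature } from './vaultUtilities';

// Derive a key scoped to 2024-01-01 / us-east-1 / s3 (secret is illustrative).
const signingKey = calculateSigningKey('wJalr...EXAMPLEKEY', 'us-east-1', '20240101', 's3');
const signature = crypto
    .createHmac('sha256', signingKey)
    .update('string-to-sign', 'binary')
    .digest('hex');

// The V2 path is a single HMAC returned as base64:
hashSignature('string-to-sign', 'wJalr...EXAMPLEKEY', 'SHA256');
```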
@@ -17,7 +17,7 @@ export default function checkRequestExpiry(timestamp: number, log: Logger) {
    log.trace('request timestamp', { requestTimestamp: timestamp });
    log.trace('current timestamp', { currentTimestamp: currentTime });

    const fifteenMinutes = (15 * 60 * 1000);
    const fifteenMinutes = 15 * 60 * 1000;
    if (currentTime - timestamp > fifteenMinutes) {
        log.trace('request timestamp is not within 15 minutes of current time');
        log.debug('request time too skewed', { timestamp });
@@ -26,11 +26,11 @@ export default function constructStringToSign(

    const contentMD5 = headers['content-md5'] ?
        headers['content-md5'] : query['Content-MD5'];
    stringToSign += (contentMD5 ? `${contentMD5}\n` : '\n');
    stringToSign += contentMD5 ? `${contentMD5}\n` : '\n';

    const contentType = headers['content-type'] ?
        headers['content-type'] : query['Content-Type'];
    stringToSign += (contentType ? `${contentType}\n` : '\n');
    stringToSign += contentType ? `${contentType}\n` : '\n';

    /*
    AWS docs are conflicting on whether to include x-amz-date header here
@@ -16,15 +16,19 @@ export function check(request: any, log: Logger, data: { [key: string]: string }
     }

     // Check to make sure timestamp is within 15 minutes of current time
-    let timestamp = headers['x-amz-date'] ?
-        headers['x-amz-date'] : headers.date;
+    let timestamp = headers['x-amz-date']
+        ? headers['x-amz-date']
+        : headers.date;
     timestamp = Date.parse(timestamp);
     if (!timestamp) {
-        log.debug('missing or invalid date header',
-            { method: 'auth/v2/headerAuthCheck.check' });
-        return { err: errors.AccessDenied.
-            customizeDescription('Authentication requires a valid Date or ' +
-            'x-amz-date header') };
+        log.debug('missing or invalid date header', {
+            method: 'auth/v2/headerAuthCheck.check',
+        });
+        return {
+            err: errors.AccessDenied.customizeDescription(
+                'Authentication requires a valid Date or ' + 'x-amz-date header'
+            ),
+        };
     }

     const err = checkRequestExpiry(timestamp, log);
@@ -45,8 +49,10 @@ export function check(request: any, log: Logger, data: { [key: string]: string }
         log.debug('invalid authorization header', { authInfo });
         return { err: errors.InvalidArgument };
     }
-    const accessKey = semicolonIndex > 4 ?
-        authInfo.substring(4, semicolonIndex).trim() : undefined;
+    const accessKey =
+        semicolonIndex > 4
+            ? authInfo.substring(4, semicolonIndex).trim()
+            : undefined;
     if (typeof accessKey !== 'string' || accessKey.length === 0) {
         log.trace('invalid authorization header', { authInfo });
         return { err: errors.MissingSecurityHeader };
@@ -27,26 +27,28 @@ export function check(request: any, log: Logger, data: { [key: string]: string }
     */
     const expirationTime = parseInt(data.Expires, 10) * 1000;
     if (Number.isNaN(expirationTime)) {
-        log.debug('invalid expires parameter',
-            { expires: data.Expires });
+        log.debug('invalid expires parameter', { expires: data.Expires });
         return { err: errors.MissingSecurityHeader };
     }

     const currentTime = Date.now();

-    const preSignedURLExpiry = process.env.PRE_SIGN_URL_EXPIRY
-        && !Number.isNaN(process.env.PRE_SIGN_URL_EXPIRY)
+    const preSignedURLExpiry =
+        process.env.PRE_SIGN_URL_EXPIRY &&
+        !Number.isNaN(process.env.PRE_SIGN_URL_EXPIRY)
             ? Number.parseInt(process.env.PRE_SIGN_URL_EXPIRY, 10)
            : constants.defaultPreSignedURLExpiry * 1000;

     if (expirationTime > currentTime + preSignedURLExpiry) {
-        log.debug('expires parameter too far in future',
-            { expires: request.query.Expires });
+        log.debug('expires parameter too far in future', {
+            expires: request.query.Expires,
+        });
         return { err: errors.AccessDenied };
     }
     if (currentTime > expirationTime) {
-        log.debug('current time exceeds expires time',
-            { expires: request.query.Expires });
+        log.debug('current time exceeds expires time', {
+            expires: request.query.Expires,
+        });
         return { err: errors.RequestTimeTooSkewed };
     }
     const accessKey = data.AWSAccessKeyId;
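A note on units in the hunk above: the `Expires` query parameter arrives as epoch seconds while `Date.now()` returns milliseconds, hence the `* 1000`. A small illustration with hypothetical values (the 7-day cap is an assumption standing in for `constants.defaultPreSignedURLExpiry`):

```ts
// Expires is epoch *seconds*; Date.now() is milliseconds, hence * 1000.
const expires = '1705312800';                         // from ?Expires=...
const expirationTime = Number.parseInt(expires, 10) * 1000;
const currentTime = Date.now();
const maxPreSignedLifetime = 604800 * 1000;           // assumed 7-day cap, in ms

// rejected as AccessDenied: expiry further out than the allowed window
const tooFarInFuture = expirationTime > currentTime + maxPreSignedLifetime;
// rejected as RequestTimeTooSkewed: the URL has already expired
const alreadyExpired = currentTime > expirationTime;
```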
@@ -35,30 +35,33 @@ export default function awsURIencode(
     encodeSlash?: boolean,
     noEncodeStar?: boolean
 ) {
+    const encSlash = encodeSlash === undefined ? true : encodeSlash;
+    let encoded = '';
     /**
      * Duplicate query params are not supported by AWS S3 APIs. These params
      * are parsed as Arrays by Node.js HTTP parser which breaks this method
      */
     if (typeof input !== 'string') {
-        return '';
+        return encoded;
     }
-    let encoded = "";
-    const slash = encodeSlash === undefined || encodeSlash ? '%2F' : '/';
-    const star = noEncodeStar !== undefined && noEncodeStar ? '*' : '%2A';
     for (let i = 0; i < input.length; i++) {
         let ch = input.charAt(i);
-        if ((ch >= 'A' && ch <= 'Z') ||
+        if (
+            (ch >= 'A' && ch <= 'Z') ||
             (ch >= 'a' && ch <= 'z') ||
             (ch >= '0' && ch <= '9') ||
-            ch === '_' || ch === '-' ||
-            ch === '~' || ch === '.') {
+            ch === '_' ||
+            ch === '-' ||
+            ch === '~' ||
+            ch === '.'
+        ) {
             encoded = encoded.concat(ch);
         } else if (ch === ' ') {
             encoded = encoded.concat('%20');
         } else if (ch === '/') {
-            encoded = encoded.concat(slash);
+            encoded = encoded.concat(encSlash ? '%2F' : ch);
         } else if (ch === '*') {
-            encoded = encoded.concat(star);
+            encoded = encoded.concat(noEncodeStar ? '*' : '%2A');
         } else {
             if (ch >= '\uD800' && ch <= '\uDBFF') {
                 // If this character is a high surrogate peek the next character
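Given the encoding rules in this hunk (unreserved characters pass through, space becomes `%20`, and `/` and `*` depend on the flags), the function behaves like this sketch of call results; the inputs are made up:

```ts
import awsURIencode from './awsURIencode';

awsURIencode('photos/2024/cat.jpg');         // => 'photos%2F2024%2Fcat.jpg'
awsURIencode('photos/2024/cat.jpg', false);  // => 'photos/2024/cat.jpg' (slashes kept)
awsURIencode('a b*c');                       // => 'a%20b%2Ac'
awsURIencode('a b*c', true, true);           // => 'a%20b*c' (star kept)
awsURIencode(42 as any);                     // => '' (non-string input is rejected)
```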
@@ -15,9 +15,9 @@ export default function constructStringToSign(params: {
     timestamp: string;
     query: { [key: string]: string };
     log?: Logger;
-    proxyPath?: string;
+    proxyPath: string;
     awsService: string;
-}): string | Error {
+}): string {
     const {
         request,
         signedHeaders,
@@ -52,9 +52,11 @@ export default function constructStringToSign(params: {
         log.debug('constructed canonicalRequest', { canonicalReqResult });
     }
     const sha256 = crypto.createHash('sha256');
-    const canonicalHex = sha256.update(canonicalReqResult, 'binary')
+    const canonicalHex = sha256
+        .update(canonicalReqResult, 'binary')
         .digest('hex');
-    const stringToSign = `AWS4-HMAC-SHA256\n${timestamp}\n` +
+    const stringToSign =
+        `AWS4-HMAC-SHA256\n${timestamp}\n` +
         `${credentialScope}\n${canonicalHex}`;
     return stringToSign;
 }
@@ -1,6 +1,6 @@
-import awsURIencode from './awsURIencode';
 import * as crypto from 'crypto';
 import * as queryString from 'querystring';
+import awsURIencode from './awsURIencode';

 /**
  * createCanonicalRequest - creates V4 canonical request
@@ -56,8 +56,9 @@ export function check(
     if (payloadChecksum === 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD') {
         log.trace('requesting streaming v4 auth');
         if (request.method !== 'PUT') {
-            log.debug('streaming v4 auth for put only',
-                { method: 'auth/v4/headerAuthCheck.check' });
+            log.debug('streaming v4 auth for put only', {
+                method: 'auth/v4/headerAuthCheck.check',
+            });
             return { err: errors.InvalidArgument };
         }
         if (!request.headers['x-amz-decoded-content-length']) {
@@ -82,9 +83,12 @@ export function check(
     if (xAmzDate) {
         const xAmzDateArr = xAmzDate.split('T');
         // check that x-amz-date has the correct format and is after epochTime
-        if (xAmzDateArr.length === 2 && xAmzDateArr[0].length === 8
-            && xAmzDateArr[1].length === 7
-            && Number.parseInt(xAmzDateArr[0], 10) > 19700101) {
+        if (
+            xAmzDateArr.length === 2 &&
+            xAmzDateArr[0].length === 8 &&
+            xAmzDateArr[1].length === 7 &&
+            Number.parseInt(xAmzDateArr[0], 10) > 19700101
+        ) {
             // format of x-amz-date is ISO 8601: YYYYMMDDTHHMMSSZ
             timestamp = request.headers['x-amz-date'];
         }
@@ -92,18 +96,27 @@ export function check(
         timestamp = convertUTCtoISO8601(request.headers.date);
     }
     if (!timestamp) {
-        log.debug('missing or invalid date header',
-            { method: 'auth/v4/headerAuthCheck.check' });
-        return { err: errors.AccessDenied.
-            customizeDescription('Authentication requires a valid Date or ' +
-            'x-amz-date header') };
+        log.debug('missing or invalid date header', {
+            method: 'auth/v4/headerAuthCheck.check',
+        });
+        return {
+            err: errors.AccessDenied.customizeDescription(
+                'Authentication requires a valid Date or ' + 'x-amz-date header'
+            ),
+        };
     }

-    const validationResult = validateCredentials(credentialsArr, timestamp,
-        log);
+    const validationResult = validateCredentials(
+        credentialsArr,
+        timestamp,
+        log
+    );
     if (validationResult instanceof Error) {
-        log.debug('credentials in improper format', { credentialsArr,
-            timestamp, validationResult });
+        log.debug('credentials in improper format', {
+            credentialsArr,
+            timestamp,
+            validationResult,
+        });
         return { err: validationResult };
     }
     // credentialsArr is [accessKey, date, region, aws-service, aws4_request]
@@ -126,20 +139,23 @@ export function check(
     // expiry is as set out in the policy.

     // 15 minutes in seconds
-    const expiry = (15 * 60);
+    const expiry = 15 * 60;
     const isTimeSkewed = checkTimeSkew(timestamp, expiry, log);
     if (isTimeSkewed) {
         return { err: errors.RequestTimeTooSkewed };
     }

-    let proxyPath: string | undefined;
+    let proxyPath: string | null = null;
     if (request.headers.proxy_path) {
         try {
             proxyPath = decodeURIComponent(request.headers.proxy_path);
         } catch (err) {
             log.debug('invalid proxy_path header', { proxyPath, err });
-            return { err: errors.InvalidArgument.customizeDescription(
-                'invalid proxy_path header') };
+            return {
+                err: errors.InvalidArgument.customizeDescription(
+                    'invalid proxy_path header'
+                ),
+            };
         }
     }
@@ -152,14 +168,15 @@ export function check(
         timestamp,
         payloadChecksum,
         awsService: service,
-        proxyPath,
+        proxyPath: proxyPath!,
     });
     log.trace('constructed stringToSign', { stringToSign });
+    // TODO Why?
+    // @ts-ignore
     if (stringToSign instanceof Error) {
         return { err: stringToSign };
     }

-
     return {
         err: null,
         params: {
@@ -1,6 +1,7 @@
 import { Logger } from 'werelogs';
+import * as constants from '../../constants';
 import errors from '../../errors';

 import constructStringToSign from './constructStringToSign';
 import { checkTimeSkew, convertAmzTimeToMs } from './timeUtils';
 import { validateCredentials, extractQueryParams } from './validateInputs';
@@ -38,11 +39,13 @@ export function check(request: any, log: Logger, data: { [key: string]: string }
         return { err: errors.AccessDenied };
     }

-    const validationResult = validateCredentials(credential, timestamp,
-        log);
+    const validationResult = validateCredentials(credential, timestamp, log);
     if (validationResult instanceof Error) {
-        log.debug('credentials in improper format', { credential,
-            timestamp, validationResult });
+        log.debug('credentials in improper format', {
+            credential,
+            timestamp,
+            validationResult,
+        });
         return { err: validationResult };
     }
     const accessKey = credential[0];
@@ -56,14 +59,17 @@ export function check(request: any, log: Logger, data: { [key: string]: string }
         return { err: errors.RequestTimeTooSkewed };
     }

-    let proxyPath: string | undefined;
+    let proxyPath: string | null = null;
     if (request.headers.proxy_path) {
         try {
             proxyPath = decodeURIComponent(request.headers.proxy_path);
         } catch (err) {
             log.debug('invalid proxy_path header', { proxyPath });
-            return { err: errors.InvalidArgument.customizeDescription(
-                'invalid proxy_path header') };
+            return {
+                err: errors.InvalidArgument.customizeDescription(
+                    'invalid proxy_path header'
+                ),
+            };
         }
     }
@@ -89,11 +95,12 @@ export function check(request: any, log: Logger, data: { [key: string]: string }
         signedHeaders,
         payloadChecksum,
         timestamp,
-        credentialScope:
-            `${scopeDate}/${region}/${service}/${requestType}`,
+        credentialScope: `${scopeDate}/${region}/${service}/${requestType}`,
         awsService: service,
-        proxyPath,
+        proxyPath: proxyPath!,
     });
+    // TODO Why?
+    // @ts-ignore
     if (stringToSign instanceof Error) {
         return { err: stringToSign };
     }
@@ -1,21 +1,11 @@
 import { Transform } from 'stream';
 import async from 'async';
-import errors from '../../../errors';
 import { Logger } from 'werelogs';
-import Vault, { AuthV4RequestParams } from '../../Vault';
-import { Callback } from '../../backends/in_memory/types';

+import Vault from '../../Vault';
+import errors from '../../../errors';
 import constructChunkStringToSign from './constructChunkStringToSign';

-export type TransformParams = {
-    accessKey: string;
-    signatureFromRequest: string;
-    region: string;
-    scopeDate: string;
-    timestamp: string;
-    credentialScope: string;
-};
-
 /**
  * This class is designed to handle the chunks sent in a streaming
  * v4 Auth request
@@ -25,45 +15,60 @@ export default class V4Transform extends Transform {
     cb: Callback;
     accessKey: string;
     region: string;
     /** Date parsed from headers in ISO8601. */
     scopeDate: string;
     /** Date parsed from headers in ISO8601. */
     timestamp: string;
     /** Items from auth header, plus the string 'aws4_request' joined with '/': timestamp/region/aws-service/aws4_request */
     credentialScope: string;
-    lastSignature: string;
+    lastSignature?: string;
     currentSignature?: string;
     haveMetadata: boolean;
     seekingDataSize: number;
     currentData?: any;
     dataCursor: number;
-    currentMetadata: any[];
+    currentMetadata: Buffer[];
     lastPieceDone: boolean;
     lastChunk: boolean;
     vault: Vault;

     /**
      * @constructor
-     * @param streamingV4Params - info for chunk authentication
-     * @param streamingV4Params.accessKey - requester's accessKey
-     * @param streamingV4Params.signatureFromRequest - signature
+     * @param {object} streamingV4Params - info for chunk authentication
+     * @param {string} streamingV4Params.accessKey - requester's accessKey
+     * @param {string} streamingV4Params.signatureFromRequest - signature
      * sent with headers
-     * @param streamingV4Params.region - region sent with auth header
-     * @param streamingV4Params.scopeDate - date sent with auth header
-     * @param streamingV4Params.timestamp - date parsed from headers
+     * @param {string} streamingV4Params.region - region sent with auth header
+     * @param {string} streamingV4Params.scopeDate - date sent with auth header
+     * @param {string} streamingV4Params.timestamp - date parsed from headers
      * in ISO 8601 format: YYYYMMDDTHHMMSSZ
-     * @param streamingV4Params.credentialScope - items from auth
+     * @param {string} streamingV4Params.credentialScope - items from auth
      * header plus the string 'aws4_request' joined with '/':
      * timestamp/region/aws-service/aws4_request
-     * @param vault - Vault instance passed from CloudServer
-     * @param log - logger object
-     * @param cb - callback to api
+     * @param {object} vault - Vault instance passed from CloudServer
+     * @param {object} log - logger object
+     * @param {function} cb - callback to api
      */
     constructor(
-        streamingV4Params: TransformParams,
+        streamingV4Params: {
+            accessKey: string,
+            signatureFromRequest: string,
+            region: string,
+            scopeDate: string,
+            timestamp: string,
+            credentialScope: string
+        },
         vault: Vault,
         log: Logger,
-        cb: Callback,
+        cb: Callback
     ) {
-        const { accessKey, signatureFromRequest, region, scopeDate, timestamp,
-            credentialScope } = streamingV4Params;
+        const {
+            accessKey,
+            signatureFromRequest,
+            region,
+            scopeDate,
+            timestamp,
+            credentialScope,
+        } = streamingV4Params;
         super({});
         this.log = log;
        this.cb = cb;
@@ -113,32 +118,34 @@ export default class V4Transform extends Transform {
             this.currentMetadata.push(remainingPlusStoredMetadata);
             return { completeMetadata: false };
         }
-        let fullMetadata = remainingPlusStoredMetadata.slice(0,
-            lineBreakIndex);
+        let fullMetadata = remainingPlusStoredMetadata.slice(0, lineBreakIndex);

         // handle extra line break on end of data chunk
         if (fullMetadata.length === 0) {
-            const chunkWithoutLeadingLineBreak = remainingPlusStoredMetadata
-                .slice(2);
+            const chunkWithoutLeadingLineBreak =
+                remainingPlusStoredMetadata.slice(2);
             // find second line break
             lineBreakIndex = chunkWithoutLeadingLineBreak.indexOf('\r\n');
             if (lineBreakIndex < 0) {
                 this.currentMetadata.push(chunkWithoutLeadingLineBreak);
                 return { completeMetadata: false };
             }
-            fullMetadata = chunkWithoutLeadingLineBreak.slice(0,
-                lineBreakIndex);
+            fullMetadata = chunkWithoutLeadingLineBreak.slice(
+                0,
+                lineBreakIndex
+            );
         }

         const splitMeta = fullMetadata.toString().split(';');
         this.log.trace('parsed full metadata for chunk', { splitMeta });
         if (splitMeta.length !== 2) {
-            this.log.trace('chunk body did not contain correct ' +
-                'metadata format');
+            this.log.trace(
+                'chunk body did not contain correct ' + 'metadata format'
+            );
             return { err: errors.InvalidArgument };
         }
         // chunk-size is sent in hex
-        const dataSize = Number.parseInt(splitMeta[0], 16);
+        let dataSize = Number.parseInt(splitMeta[0], 16);
         if (Number.isNaN(dataSize)) {
             this.log.trace('chunk body did not contain valid size');
             return { err: errors.InvalidArgument };
@@ -165,8 +172,9 @@ export default class V4Transform extends Transform {
             completeMetadata: true,
             // start slice at lineBreak plus 2 to remove line break at end of
             // metadata piece since length of '\r\n' is 2
-            unparsedChunk: remainingPlusStoredMetadata
-                .slice(lineBreakIndex + 2),
+            unparsedChunk: remainingPlusStoredMetadata.slice(
+                lineBreakIndex + 2
+            ),
         };
     }
@@ -177,51 +185,54 @@ export default class V4Transform extends Transform {
      * @param done - callback to _transform
      * @return executes callback with err if applicable
      */
-    _authenticate(dataToSend: Buffer | null, done: Callback) {
+    _authenticate(dataToSend: Buffer | null, done: (err?: Error) => void) {
         // use prior sig to construct new string to sign
-        const stringToSign = constructChunkStringToSign(this.timestamp,
-            this.credentialScope, this.lastSignature, dataToSend ?? undefined);
-        this.log.trace('constructed chunk string to sign',
-            { stringToSign });
+        const stringToSign = constructChunkStringToSign(
+            this.timestamp,
+            this.credentialScope,
+            this.lastSignature!,
+            dataToSend
+        );
+        this.log.trace('constructed chunk string to sign', { stringToSign });
         // once used prior sig to construct string to sign, reassign
         // lastSignature to current signature
-        this.lastSignature = this.currentSignature!;
-        const vaultParams: AuthV4RequestParams = {
+        this.lastSignature = this.currentSignature;
+        const vaultParams = {
             log: this.log,
             data: {
                 accessKey: this.accessKey,
-                signatureFromRequest: this.currentSignature!,
+                signatureFromRequest: this.currentSignature,
                 region: this.region,
                 scopeDate: this.scopeDate,
                 stringToSign,
-                // TODO FIXME This can not work
-                // @ts-expect-errors
                 timestamp: this.timestamp,
                 credentialScope: this.credentialScope,
             },
         };
-        return this.vault.authenticateV4Request(vaultParams, null, err => {
+        return this.vault.authenticateV4Request(vaultParams, null, (err: Error) => {
             if (err) {
-                this.log.trace('err from vault on streaming v4 auth',
-                    { error: err, paramsSentToVault: vaultParams.data });
+                this.log.trace('err from vault on streaming v4 auth', {
+                    error: err,
+                    paramsSentToVault: vaultParams.data,
+                });
                 return done(err);
             }
             return done();
         });
     }

-    // TODO encoding unused. Why?
     /**
      * This function will parse the chunk into metadata and data,
      * use the metadata to authenticate with vault and send the
      * data on to be stored if authentication passes
      *
      * @param chunk - chunk from request body
-     * @param _encoding - Data encoding unused
+     * @param encoding - Data encoding
      * @param callback - Callback(err, justDataChunk, encoding)
      * @return executes callback with err if applicable
      */
-    _transform(chunk: Buffer, _encoding: string, callback: Callback) {
+    _transform(chunk: Buffer, _encoding: string, callback: (err?: Error) => void) {
         // 'chunk' here is the node streaming chunk
         // transfer-encoding chunks should be of the format:
         // string(IntHexBase(chunk-size)) + ";chunk-signature=" +
@@ -230,9 +241,10 @@ export default class V4Transform extends Transform {

         if (this.lastPieceDone) {
             const slice = chunk.slice(0, 10);
-            this.log.trace('received chunk after end. ' +
-                'See first 10 bytes of chunk',
-                { chunk: slice.toString() });
+            this.log.trace(
+                'received chunk after end. ' + 'See first 10 bytes of chunk',
+                { chunk: slice.toString() }
+            );
             return callback();
         }
         let unparsedChunk = chunk;
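The transfer-encoding format referenced in the comment above can be made concrete with a small sketch; the signature value below is a placeholder, not a real chunk signature:

```ts
// hex(chunk-size) + ";chunk-signature=" + signature + "\r\n" + data + "\r\n"
const data = Buffer.from('hello world'); // 11 bytes -> hex size "b"
const chunkSignature =
    'ad80c730a21e5b8d04586a2213dd63b9a0e99e0e2307b0ade35a65485a288648'; // placeholder
const wireChunk = Buffer.concat([
    Buffer.from(`${data.length.toString(16)};chunk-signature=${chunkSignature}\r\n`),
    data,
    Buffer.from('\r\n'),
]);
// A final zero-sized chunk ("0;chunk-signature=...\r\n\r\n") terminates the body.
```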
@@ -241,10 +253,11 @@ export default class V4Transform extends Transform {
             // test function
             () => chunkLeftToEvaluate,
             // async function
-            done => {
+            (done) => {
                 if (!this.haveMetadata) {
-                    this.log.trace('do not have metadata so calling ' +
-                        '_parseMetadata');
+                    this.log.trace(
+                        'do not have metadata so calling ' + '_parseMetadata'
+                    );
                     // need to parse our metadata
                     const parsedMetadataResults =
                         this._parseMetadata(unparsedChunk);
@@ -258,13 +271,11 @@ export default class V4Transform extends Transform {
                 }
                 // have metadata so reset unparsedChunk to remaining
                 // without metadata piece
-                // TODO Is that okay?
-                // @ts-expect-errors
-                unparsedChunk = parsedMetadataResults.unparsedChunk;
+                unparsedChunk = parsedMetadataResults.unparsedChunk!;
             }
             if (this.lastChunk) {
                 this.log.trace('authenticating final chunk with no data');
-                return this._authenticate(null, err => {
+                return this._authenticate(null, (err) => {
                     if (err) {
                         return done(err);
                     }
@@ -283,17 +294,18 @@ export default class V4Transform extends Transform {
             }
             // parse just the next data piece without \r\n at the end
             // (therefore, minus 2)
-            const nextDataPiece =
-                unparsedChunk.slice(0, this.seekingDataSize - 2);
+            const nextDataPiece = unparsedChunk.slice(
+                0,
+                this.seekingDataSize - 2
+            );
             // add parsed data piece to other currentData pieces
             // so that this.currentData is the full data piece
             nextDataPiece.copy(this.currentData, this.dataCursor);
-            return this._authenticate(this.currentData, err => {
+            return this._authenticate(this.currentData, (err) => {
                 if (err) {
                     return done(err);
                 }
-                unparsedChunk =
-                    unparsedChunk.slice(this.seekingDataSize);
+                unparsedChunk = unparsedChunk.slice(this.seekingDataSize);
                 this.push(this.currentData);
                 this.haveMetadata = false;
                 this.seekingDataSize = -1;
@@ -304,13 +316,13 @@ export default class V4Transform extends Transform {
                 });
             },
             // final callback
-            err => {
+            (err) => {
                 if (err) {
                     return this.cb(err as any);
                 }
                 // get next chunk
                 return callback();
-            },
+            }
         );
     }
 }
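A hedged sketch of how such a transform is wired into a request pipeline; `vault`, `log`, `request`, and `storageSink` are stand-ins for objects the caller (e.g. CloudServer) would supply, and the credential values are illustrative:

```ts
const headerSignature =
    'fe5f80f77d5fa3beca038a248ff027d0445342fe2855ddc963176630326f1024'; // sample

const v4Transform = new V4Transform(
    {
        accessKey: 'AKIAIOSFODNN7EXAMPLE',
        signatureFromRequest: headerSignature,
        region: 'us-east-1',
        scopeDate: '20240115',
        timestamp: '20240115T000000Z',
        credentialScope: '20240115/us-east-1/s3/aws4_request',
    },
    vault,
    log,
    err => { /* surface auth errors to the API layer */ },
);
// Each wire chunk is authenticated; only its payload bytes are pushed on.
request.pipe(v4Transform).pipe(storageSink);
```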
@@ -3,33 +3,34 @@ import * as constants from '../../../constants';

 /**
  * Constructs stringToSign for chunk
- * @param timestamp - date parsed from headers
- * in ISO 8601 format: YYYYMMDDTHHMMSSZ
- * @param credentialScope - items from auth
- * header plus the string 'aws4_request' joined with '/':
+ * @param timestamp - date parsed from headers in ISO 8601 format: YYYYMMDDTHHMMSSZ
+ * @param credentialScope - items from auth header plus the string
+ * 'aws4_request' joined with '/':
  * timestamp/region/aws-service/aws4_request
  * @param lastSignature - signature from headers or prior chunk
  * @param justDataChunk - data portion of chunk
  * @returns stringToSign
  */
 export default function constructChunkStringToSign(
     timestamp: string,
     credentialScope: string,
     lastSignature: string,
-    justDataChunk?: Buffer | string,
-) {
+    justDataChunk: string | Buffer | null
+): string {
     let currentChunkHash: string;
     // for last chunk, there will be no data, so use emptyStringHash
     if (!justDataChunk) {
         currentChunkHash = constants.emptyStringHash;
     } else {
-        const hash = crypto.createHash('sha256');
-        const temp = justDataChunk instanceof Buffer
-            ? hash.update(justDataChunk)
-            : hash.update(justDataChunk, 'binary');
-        currentChunkHash = temp.digest('hex');
+        let hash = crypto.createHash('sha256');
+        currentChunkHash = (
+            typeof justDataChunk === 'string'
+                ? hash.update(justDataChunk, 'binary')
+                : hash.update(justDataChunk)
+        ).digest('hex');
     }
-    return `AWS4-HMAC-SHA256-PAYLOAD\n${timestamp}\n` +
+    return (
+        `AWS4-HMAC-SHA256-PAYLOAD\n${timestamp}\n` +
         `${credentialScope}\n${lastSignature}\n` +
-        `${constants.emptyStringHash}\n${currentChunkHash}`;
+        `${constants.emptyStringHash}\n${currentChunkHash}`
+    );
 }
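To make the string layout concrete, here is what the function assembles for a hypothetical first chunk; all values are illustrative:

```ts
import * as crypto from 'crypto';

const timestamp = '20240115T000000Z';
const credentialScope = '20240115/us-east-1/s3/aws4_request';
const lastSignature =
    'fe5f80f77d5fa3beca038a248ff027d0445342fe2855ddc963176630326f1024'; // header signature
const emptyStringHash = crypto.createHash('sha256').update('').digest('hex');
const chunkHash = crypto.createHash('sha256')
    .update(Buffer.from('hello world'))
    .digest('hex');

const stringToSign =
    'AWS4-HMAC-SHA256-PAYLOAD\n' +
    `${timestamp}\n` +
    `${credentialScope}\n` +
    `${lastSignature}\n` +
    `${emptyStringHash}\n` + // hash of the empty string, per the format above
    `${chunkHash}`;
// Signing this with the SigV4 signing key yields the chunk-signature field.
```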
@@ -39,20 +39,27 @@ export function validateCredentials(
     // convert timestamp to format of scopeDate YYYYMMDD
     const timestampDate = timestamp.split('T')[0];
     if (scopeDate.length !== 8 || scopeDate !== timestampDate) {
-        log.warn('scope date must be the same date as the timestamp date',
-            { scopeDate, timestampDate });
+        log.warn('scope date must be the same date as the timestamp date', {
+            scopeDate,
+            timestampDate,
+        });
         return errors.RequestTimeTooSkewed;
     }
-    if (service !== 's3' && service !== 'iam' && service !== 'ring' &&
-        service !== 'sts') {
+    if (
+        service !== 's3' &&
+        service !== 'iam' &&
+        service !== 'ring' &&
+        service !== 'sts'
+    ) {
         log.warn('service in credentials is not one of s3/iam/ring/sts', {
             service,
         });
         return errors.InvalidArgument;
     }
     if (requestType !== 'aws4_request') {
-        log.warn('requestType contained in params is not aws4_request',
-            { requestType });
+        log.warn('requestType contained in params is not aws4_request', {
+            requestType,
+        });
         return errors.InvalidArgument;
     }
     return {};
@@ -78,8 +85,9 @@ export function extractQueryParams(

     // Do not need the algorithm sent back
     if (queryObj['X-Amz-Algorithm'] !== 'AWS4-HMAC-SHA256') {
-        log.warn('algorithm param incorrect',
-            { algo: queryObj['X-Amz-Algorithm'] });
+        log.warn('algorithm param incorrect', {
+            algo: queryObj['X-Amz-Algorithm'],
+        });
         return authParams;
     }
@@ -92,7 +100,6 @@ export function extractQueryParams(
         return authParams;
     }

-
     const signature = queryObj['X-Amz-Signature'];
     if (signature && signature.length === 64) {
         authParams.signatureFromRequest = signature;
@@ -105,14 +112,15 @@ export function extractQueryParams(
     if (timestamp && timestamp.length === 16) {
         authParams.timestamp = timestamp;
     } else {
-        log.warn('missing or invalid timestamp',
-            { timestamp: queryObj['X-Amz-Date'] });
+        log.warn('missing or invalid timestamp', {
+            timestamp: queryObj['X-Amz-Date'],
+        });
         return authParams;
     }

     const expiry = Number.parseInt(queryObj['X-Amz-Expires'] ?? 'nope', 10);
     const sevenDays = 604800;
-    if (expiry && (expiry > 0 && expiry <= sevenDays)) {
+    if (expiry && expiry > 0 && expiry <= sevenDays) {
         authParams.expiry = expiry;
     } else {
         log.warn('invalid expiry', { expiry });
@@ -130,7 +138,6 @@ export function extractQueryParams(
         return authParams;
     }

-
 /**
  * Extract and validate components from auth header
  * @param authHeader - authorization header from request
@@ -160,22 +167,28 @@ export function extractAuthItems(authHeader: string, log: Logger) {
     ) {
         // @ts-ignore
         authItems.credentialsArr = credentialStr
-            .trim().replace('Credential=', '').split('/');
+            .trim()
+            .replace('Credential=', '')
+            .split('/');
     } else {
         log.warn('missing credentials');
     }
     log.trace('signed headers from request', { signedHeadersStr });
-    if (signedHeadersStr && signedHeadersStr.trim()
-        .startsWith('SignedHeaders=')) {
+    if (
+        signedHeadersStr &&
+        signedHeadersStr.trim().startsWith('SignedHeaders=')
+    ) {
         authItems.signedHeaders = signedHeadersStr
-            .trim().replace('SignedHeaders=', '');
+            .trim()
+            .replace('SignedHeaders=', '');
     } else {
         log.warn('missing signed headers');
     }
     log.trace('signature from request', { signatureStr });
     if (signatureStr && signatureStr.trim().startsWith('Signature=')) {
         authItems.signatureFromRequest = signatureStr
-            .trim().replace('Signature=', '');
+            .trim()
+            .replace('Signature=', '');
     } else {
         log.warn('missing signature');
     }
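For orientation, here is a sample v4 `Authorization` header and the pieces this function extracts; the split below is a rough illustration, not the library's parsing code, and the credential values are the usual AWS documentation samples:

```ts
const authHeader =
    'AWS4-HMAC-SHA256 ' +
    'Credential=AKIAIOSFODNN7EXAMPLE/20240115/us-east-1/s3/aws4_request, ' +
    'SignedHeaders=host;x-amz-content-sha256;x-amz-date, ' +
    'Signature=fe5f80f77d5fa3beca038a248ff027d0445342fe2855ddc963176630326f1024';

// Rough tokenization for illustration only:
const [, credentialPart, signedHeadersPart, signaturePart] =
    authHeader.split(/[, ]+/);
const credentialsArr = credentialPart.replace('Credential=', '').split('/');
// => ['AKIAIOSFODNN7EXAMPLE', '20240115', 'us-east-1', 's3', 'aws4_request']
```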
@@ -196,9 +209,11 @@ export function areSignedHeadersComplete(signedHeaders: string, allHeaders: Head
     }
     const headers = Object.keys(allHeaders);
     for (let i = 0; i < headers.length; i++) {
-        if ((headers[i].startsWith('x-amz-')
-            || headers[i].startsWith('x-scal-'))
-            && signedHeadersList.indexOf(headers[i]) === -1) {
+        if (
+            (headers[i].startsWith('x-amz-') ||
+                headers[i].startsWith('x-scal-')) &&
+            signedHeadersList.indexOf(headers[i]) === -1
+        ) {
             return false;
         }
     }
@@ -1,569 +0,0 @@
-import cluster, { Worker } from 'cluster';
-import * as werelogs from 'werelogs';
-
-import { default as errors } from '../../lib/errors';
-
-const rpcLogger = new werelogs.Logger('ClusterRPC');
-
-/**
- * Remote procedure calls support between cluster workers.
- *
- * When using the cluster module, new processes are forked and are
- * dispatched workloads, usually HTTP requests. The ClusterRPC module
- * implements a RPC system to send commands to all cluster worker
- * processes at once from any particular worker, and retrieve their
- * individual command results, like a distributed map operation.
- *
- * The existing nodejs cluster IPC channel is setup from the primary
- * to each worker, but not between workers, so there has to be a hop
- * by the primary.
- *
- * How a command is treated:
- *
- * - a worker sends a command message to the primary
- *
- * - the primary then forwards that command to each existing worker
- *   (including the requestor)
- *
- * - each worker then executes the command and returns a result or an
- *   error
- *
- * - the primary gathers all workers results into an array
- *
- * - finally, the primary dispatches the results array to the original
- *   requesting worker
- *
- *
- * Limitations:
- *
- * - The command payload must be serializable, which means that:
- *   - it should not contain circular references
- *   - it should be of a reasonable size to be sent in a single RPC message
- *
- * - The "toWorkers" parameter of value "*" targets the set of workers
- *   that are available at the time the command is dispatched. Any new
- *   worker spawned after the command has been dispatched for
- *   processing, but before the command completes, don't execute
- *   the command and hence are not part of the results array.
- *
- *
- * To set it up:
- *
- * - On the primary:
- *   if (cluster.isPrimary) {
- *       setupRPCPrimary();
- *   }
- *
- * - On the workers:
- *   if (!cluster.isPrimary) {
- *       setupRPCWorker({
- *           handler1: (payload: object, uids: string, callback: HandlerCallback) => void,
- *           handler2: ...
- *       });
- *   }
- *   Handler functions will be passed the command payload, request
- *   serialized uids, and must call the callback when the worker is done
- *   processing the command:
- *       callback(error: Error | null | undefined, result?: any)
- *
- * When this setup is done, any worker can start sending commands by calling
- * the async function sendWorkerCommand().
- */
-
-// exported types
-
-export type ResultObject = {
-    error: Error | null;
-    result: any;
-};
-
-/**
- * saved Promise for sendWorkerCommand
- */
-export type CommandPromise = {
-    resolve: (results?: ResultObject[]) => void;
-    reject: (error: Error) => void;
-    timeout: NodeJS.Timeout | null;
-};
-export type HandlerCallback = (error: (Error & { code?: number }) | null | undefined, result?: any) => void;
-export type HandlerFunction = (payload: object, uids: string, callback: HandlerCallback) => void;
-export type HandlersMap = {
-    [index: string]: HandlerFunction;
-};
-export type PrimaryHandlerFunction = (worker: Worker, payload: object, uids: string, callback: HandlerCallback) => void;
-export type PrimaryHandlersMap = Record<string, PrimaryHandlerFunction>;
-
-// private types
-
-type RPCMessage<T extends string, P> = {
-    type: T;
-    uids: string;
-    payload: P;
-};
-
-type RPCCommandMessage = RPCMessage<'cluster-rpc:command', any> & {
-    toWorkers: string;
-    toHandler: string;
-};
-
-type MarshalledResultObject = {
-    error: string | null;
-    errorCode?: number;
-    result: any;
-};
-
-type RPCCommandResultMessage = RPCMessage<'cluster-rpc:commandResult', MarshalledResultObject>;
-
-type RPCCommandResultsMessage = RPCMessage<'cluster-rpc:commandResults', {
-    results: MarshalledResultObject[];
-}>;
-
-type RPCCommandErrorMessage = RPCMessage<'cluster-rpc:commandError', {
-    error: string;
-}>;
-
-interface RPCSetupOptions {
-    /**
-     * As werelogs is not a peerDependency, arsenal and a parent project
-     * might have their own separate versions duplicated in dependencies.
-     * The config are therefore not shared.
-     * Use this to propagate werelogs config to arsenal's ClusterRPC.
-     */
-    werelogsConfig?: Parameters<typeof werelogs.configure>[0];
-};
-
-/**
- * In primary: store worker IDs that are waiting to be dispatched
- * their command's results, as a mapping.
- */
-const uidsToWorkerId: {
-    [index: string]: number;
-} = {};
-
-/**
- * In primary: store worker responses for commands in progress as a
- * mapping.
- *
- * Result objects are 'null' while the worker is still processing the
- * command. When a worker finishes processing it stores the result as:
- *     {
- *         error: string | null,
- *         result: any
- *     }
- */
-const uidsToCommandResults: {
-    [index: string]: {
-        [index: number]: MarshalledResultObject | null;
-    };
-} = {};
-
-/**
- * In workers: store promise callbacks for commands waiting to be
- * dispatched, as a mapping.
- */
-const uidsToCommandPromise: {
-    [index: string]: CommandPromise;
-} = {};
-
-function _isRpcMessage(message) {
-    return (message !== null &&
-        typeof message === 'object' &&
-        typeof message.type === 'string' &&
-        message.type.startsWith('cluster-rpc:'));
-}
-
-/**
- * Setup cluster RPC system on the primary
- *
- * @param {object} [handlers] - mapping of handler names to handler functions
- *     handler function:
- *         `handler({Worker} worker, {object} payload, {string} uids, {function} callback)`
- *     handler callback must be called when worker is done with the command:
- *         `callback({Error|null} error, {any} [result])`
- * @return {undefined}
- */
-export function setupRPCPrimary(handlers?: PrimaryHandlersMap, options?: RPCSetupOptions) {
-    if (options?.werelogsConfig) {
-        werelogs.configure(options.werelogsConfig);
-    }
-    cluster.on('message', (worker, message) => {
-        if (_isRpcMessage(message)) {
-            _handlePrimaryMessage(worker, message, handlers);
-        }
-    });
-}
-
-/**
- * Setup RPCs on a cluster worker process
- *
- * @param {object} handlers - mapping of handler names to handler functions
- *     handler function:
- *         handler({object} payload, {string} uids, {function} callback)
- *     handler callback must be called when worker is done with the command:
- *         callback({Error|null} error, {any} [result])
- * @return {undefined}
- * }
- */
-export function setupRPCWorker(handlers: HandlersMap, options?: RPCSetupOptions) {
-    if (!process.send) {
-        throw new Error('fatal: cannot setup cluster RPC: "process.send" is not available');
-    }
-    if (options?.werelogsConfig) {
-        werelogs.configure(options.werelogsConfig);
-    }
-    process.on('message', (message: RPCCommandMessage | RPCCommandResultsMessage) => {
-        if (_isRpcMessage(message)) {
-            _handleWorkerMessage(message, handlers);
-        }
-    });
-}
-
-/**
- * Send a command for workers to execute in parallel, and wait for results
- *
- * @param {string} toWorkers - which workers should execute the command
- *     Currently the supported values are:
- *     - "*", meaning all workers will execute the command
- *     - "PRIMARY", meaning primary process will execute the command
- * @param {string} toHandler - name of handler that will execute the
- *     command in workers, as declared in setupRPCWorker() parameter object
- * @param {string} uids - unique identifier of the command, must be
- *     unique across all commands in progress
- * @param {object} payload - message payload, sent as-is to the handler
- * @param {number} [timeoutMs=60000] - timeout the command with a
- *     "RequestTimeout" error after this number of milliseconds - set to 0
- *     to disable timeouts (the command may then hang forever)
- * @returns {Promise}
- */
-export async function sendWorkerCommand(
-    toWorkers: string,
-    toHandler: string,
-    uids: string,
-    payload: object,
-    timeoutMs: number = 60000
-) {
-    if (typeof uids !== 'string') {
-        rpcLogger.error('missing or invalid "uids" field', { uids });
-        throw errors.MissingParameter;
-    }
-    if (uidsToCommandPromise[uids] !== undefined) {
-        rpcLogger.error('a command is already in progress with same uids', { uids });
-        throw errors.OperationAborted;
-    }
-    rpcLogger.info('sending command', { toWorkers, toHandler, uids, payload });
-    return new Promise((resolve, reject) => {
-        let timeout: NodeJS.Timeout | null = null;
-        if (timeoutMs) {
-            timeout = setTimeout(() => {
-                delete uidsToCommandPromise[uids];
-                reject(errors.RequestTimeout);
-            }, timeoutMs);
-        }
-        uidsToCommandPromise[uids] = { resolve, reject, timeout };
-        const message: RPCCommandMessage = {
-            type: 'cluster-rpc:command',
-            toWorkers,
-            toHandler,
-            uids,
-            payload,
-        };
-        return process.send?.(message);
-    });
-}
-
-/**
- * Get the number of commands in flight
- * @returns {number}
- */
-export function getPendingCommandsCount() {
-    return Object.keys(uidsToCommandPromise).length;
-}
-
-function _dispatchCommandResultsToWorker(
-    worker: Worker,
-    uids: string,
-    resultsArray: MarshalledResultObject[]
-): void {
-    const message: RPCCommandResultsMessage = {
-        type: 'cluster-rpc:commandResults',
-        uids,
-        payload: {
-            results: resultsArray,
-        },
-    };
-    worker.send(message);
-}
-
-function _dispatchCommandErrorToWorker(
-    worker: Worker,
-    uids: string,
-    error: Error,
-): void {
-    const message: RPCCommandErrorMessage = {
-        type: 'cluster-rpc:commandError',
-        uids,
-        payload: {
-            error: error.message,
-        },
-    };
-    worker.send(message);
-}
-
-function _sendPrimaryCommandResult(
-    worker: Worker,
-    uids: string,
-    error: (Error & { code?: number }) | null | undefined,
-    result?: any
-): void {
-    const message: RPCCommandResultsMessage = {
-        type: 'cluster-rpc:commandResults',
-        uids,
-        payload: {
-            results: [{ error: error?.message || null, errorCode: error?.code, result }],
-        },
-    };
-    worker.send?.(message);
-}
-
-function _handlePrimaryCommandMessage(
-    fromWorker: Worker,
-    logger: any,
-    message: RPCCommandMessage,
-    handlers?: PrimaryHandlersMap
-): void {
-    const { toWorkers, toHandler, uids, payload } = message;
-    if (toWorkers === '*') {
-        if (uidsToWorkerId[uids] !== undefined) {
-            logger.warn('new command already has a waiting worker with same uids', {
-                uids, workerId: uidsToWorkerId[uids],
-            });
-            return undefined;
-        }
-        const commandResults = {};
-        for (const workerId of Object.keys(cluster.workers || {})) {
-            commandResults[workerId] = null;
-        }
-        uidsToWorkerId[uids] = fromWorker?.id;
-        uidsToCommandResults[uids] = commandResults;
-
-        for (const [workerId, worker] of Object.entries(cluster.workers || {})) {
-            logger.debug('sending command message to worker', {
-                workerId, toHandler, payload,
-            });
-            if (worker) {
-                worker.send(message);
-            }
-        }
-    } else if (toWorkers === 'PRIMARY') {
-        const { toHandler, uids, payload } = message;
-        const cb: HandlerCallback = (err, result) => _sendPrimaryCommandResult(fromWorker, uids, err, result);
-
-        if (toHandler in (handlers || {})) {
-            return handlers![toHandler](fromWorker, payload, uids, cb);
-        }
-        logger.error('no such handler in "toHandler" field from worker command message', {
-            toHandler,
-        });
-        return cb(errors.NotImplemented);
-    } else {
-        logger.error('unsupported "toWorkers" field from worker command message', {
-            toWorkers,
-        });
-        if (fromWorker) {
-            _dispatchCommandErrorToWorker(fromWorker, uids, errors.NotImplemented);
-        }
-    }
-}
-
-function _handlePrimaryCommandResultMessage(
-    fromWorkerId: number,
-    logger: any,
-    message: RPCCommandResultMessage
-): void {
-    const { uids, payload } = message;
-    const commandResults = uidsToCommandResults[uids];
-    if (!commandResults) {
-        logger.warn('received command response message from worker for command not in flight', {
-            workerId: fromWorkerId,
-            uids,
-        });
-        return undefined;
-    }
-    if (commandResults[fromWorkerId] === undefined) {
-        logger.warn('received command response message with unexpected worker ID', {
-            workerId: fromWorkerId,
-            uids,
-        });
-        return undefined;
-    }
-    if (commandResults[fromWorkerId] !== null) {
-        logger.warn('ignoring duplicate command response from worker', {
-            workerId: fromWorkerId,
-            uids,
-        });
-        return undefined;
-    }
-    commandResults[fromWorkerId] = payload;
-    const commandResultsArray = Object.values(commandResults);
-    if (commandResultsArray.every(response => response !== null)) {
-        logger.debug('all workers responded to command', { uids });
-        const completeCommandResultsArray = <MarshalledResultObject[]> commandResultsArray;
-        const toWorkerId = uidsToWorkerId[uids];
-        const toWorker = cluster.workers?.[toWorkerId];
-
-        delete uidsToCommandResults[uids];
-        delete uidsToWorkerId[uids];
-
-        if (!toWorker) {
-            logger.warn('worker shut down while its command was executing', {
-                workerId: toWorkerId, uids,
-            });
-            return undefined;
-        }
-        // send back response to original worker
-        _dispatchCommandResultsToWorker(toWorker, uids, completeCommandResultsArray);
-    }
-}
-
-function _handlePrimaryMessage(
-    fromWorker: Worker,
-    message: RPCCommandMessage | RPCCommandResultMessage,
-    handlers?: PrimaryHandlersMap
-): void {
-    const { type: messageType, uids } = message;
-    const logger = rpcLogger.newRequestLoggerFromSerializedUids(uids);
-    logger.debug('primary received message from worker', {
-        workerId: fromWorker?.id, rpcMessage: message,
-    });
-    if (messageType === 'cluster-rpc:command') {
-        return _handlePrimaryCommandMessage(fromWorker, logger, message, handlers);
-    }
-    if (messageType === 'cluster-rpc:commandResult') {
-        return _handlePrimaryCommandResultMessage(fromWorker?.id, logger, message);
-    }
-    logger.error('unsupported message type', {
-        workerId: fromWorker?.id, messageType, uids,
-    });
-    return undefined;
-}
-
-function _sendWorkerCommandResult(
-    uids: string,
-    error: Error | null | undefined,
-    result?: any
-): void {
-    const message: RPCCommandResultMessage = {
-        type: 'cluster-rpc:commandResult',
-        uids,
-        payload: {
-            error: error ? error.message : null,
-            result,
-        },
-    };
-    process.send?.(message);
-}
-
-function _handleWorkerCommandMessage(
-    logger: any,
-    message: RPCCommandMessage,
-    handlers: HandlersMap
-): void {
-    const { toHandler, uids, payload } = message;
-    const cb: HandlerCallback = (err, result) => _sendWorkerCommandResult(uids, err, result);
-
-    if (toHandler in handlers) {
-        return handlers[toHandler](payload, uids, cb);
-    }
-    logger.error('no such handler in "toHandler" field from worker command message', {
-        toHandler,
-    });
-    return cb(errors.NotImplemented);
-}
-
-function _handleWorkerCommandResultsMessage(
-    logger: any,
-    message: RPCCommandResultsMessage,
-): void {
-    const { uids, payload } = message;
-    const { results } = payload;
-    const commandPromise: CommandPromise = uidsToCommandPromise[uids];
-    if (commandPromise === undefined) {
-        logger.error('missing promise for command results', { uids, payload });
-        return undefined;
-    }
-    if (commandPromise.timeout) {
-        clearTimeout(commandPromise.timeout);
-    }
-    delete uidsToCommandPromise[uids];
-    const unmarshalledResults = results.map(workerResult => {
-        let workerError: Error | null = null;
-        if (workerResult.error) {
-            if (workerResult.error in errors) {
-                workerError = errors[workerResult.error];
-            } else {
-                workerError = new Error(workerResult.error);
-            }
-        }
-        if (workerError && workerResult.errorCode) {
-            (workerError as Error & { code: number }).code = workerResult.errorCode;
-        }
-        const unmarshalledResult: ResultObject = {
-            error: workerError,
-            result: workerResult.result,
-        };
-        return unmarshalledResult;
-    });
-    return commandPromise.resolve(unmarshalledResults);
-}
-
-function _handleWorkerCommandErrorMessage(
-    logger: any,
-    message: RPCCommandErrorMessage,
-): void {
-    const { uids, payload } = message;
-    const { error } = payload;
-    const commandPromise: CommandPromise = uidsToCommandPromise[uids];
-    if (commandPromise === undefined) {
-        logger.error('missing promise for command results', { uids, payload });
-        return undefined;
-    }
-    if (commandPromise.timeout) {
-        clearTimeout(commandPromise.timeout);
-    }
-    delete uidsToCommandPromise[uids];
-    let commandError: Error | null = null;
-    if (error in errors) {
-        commandError = errors[error];
-    } else {
-        commandError = new Error(error);
-    }
-    return commandPromise.reject(<Error> commandError);
-}
-
-function _handleWorkerMessage(
-    message: RPCCommandMessage | RPCCommandResultsMessage | RPCCommandErrorMessage,
-    handlers: HandlersMap
-): void {
-    const { type: messageType, uids } = message;
-    const workerId = cluster.worker?.id;
-    const logger = rpcLogger.newRequestLoggerFromSerializedUids(uids);
-    logger.debug('worker received message from primary', {
-        workerId, rpcMessage: message,
-    });
-    if (messageType === 'cluster-rpc:command') {
-        return _handleWorkerCommandMessage(logger, message, handlers);
-    }
-    if (messageType === 'cluster-rpc:commandResults') {
-        return _handleWorkerCommandResultsMessage(logger, message);
-    }
-    if (messageType === 'cluster-rpc:commandError') {
-        return _handleWorkerCommandErrorMessage(logger, message);
-    }
-    logger.error('unsupported message type', {
-        workerId, messageType,
-    });
-    return undefined;
-}
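For reference, a minimal sketch of how the removed module was driven, based on its own doc comment above; the import path and the `getPid` handler are made up for illustration:

```ts
import cluster from 'cluster';
import { setupRPCPrimary, setupRPCWorker, sendWorkerCommand } from './clusterRPC'; // assumed path

if (cluster.isPrimary) {
    setupRPCPrimary();
    for (let i = 0; i < 4; i++) {
        cluster.fork();
    }
} else {
    setupRPCWorker({
        // each worker reports its process id back to the requestor
        getPid: (payload, uids, callback) => callback(null, process.pid),
    });
    // any worker can fan out a command to all workers and await the results
    sendWorkerCommand('*', 'getPid', `uid-${process.pid}`, {})
        .then(results => console.log(results?.map(r => r.result)));
}
```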
@@ -1,13 +1,12 @@
 import * as crypto from 'crypto';

-// The min value here is to manage further backward compat if we
-// need it
+// The min value here is to manage further backward compat if we need it
 // Default value
-export const vaultGeneratedIamSecurityTokenSizeMin = 128;
+const vaultGeneratedIamSecurityTokenSizeMin = 128;
 // Safe to assume that a typical token size is less than 8192 bytes
-export const vaultGeneratedIamSecurityTokenSizeMax = 8192;
+const vaultGeneratedIamSecurityTokenSizeMax = 8192;
 // Base-64
-export const vaultGeneratedIamSecurityTokenPattern = /^[A-Za-z0-9/+=]*$/;
+const vaultGeneratedIamSecurityTokenPattern = /^[A-Za-z0-9/+=]*$/;

 // info about the iam security token
 export const iamSecurityToken = {
@@ -35,13 +34,7 @@ export const emptyFileMd5 = 'd41d8cd98f00b204e9800998ecf8427e';
 // Version 4 add the Creation-Time and Content-Language attributes,
 // and add support for x-ms-meta-* headers in UserMetadata
 // Version 5 adds the azureInfo structure
-// Version 6 adds a "deleted" flag that is updated to true before
-// the object gets deleted. This is done to keep object metadata in the
-// oplog when deleting the object, as oplog deletion events don't contain
-// any metadata of the object.
-// version 6 also adds the "isPHD" flag that is used to indicate that the master
-// object is a placeholder and is not up to date.
-export const mdModelVersion = 6;
+export const mdModelVersion = 5;
 /*
  * Splitter is used to build the object name for the overview of a
  * multipart upload and to build the object names for each part of a
@@ -94,6 +87,7 @@ export const zenkoSeparator = ':';
 /* eslint-disable camelcase */
 export const externalBackends = { aws_s3: true, azure: true, gcp: true, pfs: true };
+export const replicationBackends = { aws_s3: true, azure: true, gcp: true };

 // hex digest of sha256 hash of empty string:
 export const emptyStringHash = crypto.createHash('sha256')
     .update('', 'binary').digest('hex');
@@ -132,19 +126,6 @@ export const supportedNotificationEvents = new Set([
     's3:ObjectRemoved:*',
     's3:ObjectRemoved:Delete',
     's3:ObjectRemoved:DeleteMarkerCreated',
-    's3:Replication:OperationFailedReplication',
-    's3:ObjectTagging:*',
-    's3:ObjectTagging:Put',
-    's3:ObjectTagging:Delete',
-    's3:ObjectAcl:Put',
-    's3:ObjectRestore:*',
-    's3:ObjectRestore:Post',
-    's3:ObjectRestore:Completed',
-    's3:ObjectRestore:Delete',
-    's3:LifecycleTransition',
-    's3:LifecycleExpiration:*',
-    's3:LifecycleExpiration:DeleteMarkerCreated',
-    's3:LifecycleExpiration:Delete',
 ]);
 export const notificationArnPrefix = 'arn:scality:bucketnotif';
 // HTTP server keep-alive timeout is set to a higher value than
@@ -163,15 +144,4 @@ export const supportedLifecycleRules = [
     'expiration',
     'noncurrentVersionExpiration',
     'abortIncompleteMultipartUpload',
-    'transitions',
-    'noncurrentVersionTransition',
 ];
-// Maximum number of buckets to cache (bucket metadata)
-export const maxCachedBuckets = process.env.METADATA_MAX_CACHED_BUCKETS ?
-    Number(process.env.METADATA_MAX_CACHED_BUCKETS) : 1000;
-
-export const validRestoreObjectTiers = new Set(['Expedited', 'Standard', 'Bulk']);
-export const maxBatchingConcurrentOperations = 5;
-
-/** For policy resource arn check we allow empty account ID to not break compatibility */
-export const policyArnAllowedEmptyAccountId = ['utapi', 'scuba'];
lib/db.ts
@@ -1,3 +1,7 @@
+import { LevelDB } from 'level';
+
+const writeOptions = { sync: true };
+
 /**
  * Like Error, but with a property set to true.
  * TODO: this is copied from kineticlib, should consolidate with the
@@ -10,15 +14,13 @@
  * use:
  *     throw propError("badTypeInput", "input is not a buffer");
  *
- * @param propName - the property name.
- * @param message - the Error message.
- * @returns the Error object.
+ * @param {String} propName - the property name.
+ * @param {String} message - the Error message.
+ * @returns {Error} the Error object.
  */
 function propError(propName: string, message: string): Error {
     const err = new Error(message);
     err[propName] = true;
     // @ts-ignore
     err.is = { [propName]: true };
     return err;
 }
@@ -26,20 +28,15 @@ function propError(propName: string, message: string): Error {
  * Running transaction with multiple updates to be committed atomically
  */
 export class IndexTransaction {
-    operations: { type: 'put' | 'del'; key: string; value?: any }[];
-    db: any;
-    closed: boolean;
-    conditions: { [key: string]: string }[];
-
     /**
      * Builds a new transaction
      *
      * @argument {Leveldb} db an open database to which the updates
      * will be applied
      *
-     * @returns a new empty transaction
+     * @returns {IndexTransaction} a new empty transaction
      */
-    constructor(db: any) {
+    constructor(db: LevelDB) {
         this.operations = [];
         this.db = db;
         this.closed = false;
@@ -49,22 +46,22 @@ export class IndexTransaction {
     /**
      * Adds a new operation to participate in this running transaction
      *
-     * @argument op an object with the following attributes:
+     * @argument {object} op an object with the following attributes:
      *                    {
      *                      type: 'put' or 'del',
      *                      key: the object key,
      *                      value: (optional for del) the value to store,
      *                    }
      *
-     * @throws an error described by the following properties
+     * @throws {Error} an error described by the following properties
      *                 - invalidTransactionVerb if op is not put or del
      *                 - pushOnCommittedTransaction if already committed
      *                 - missingKey if the key is missing from the op
      *                 - missingValue if putting without a value
+     *
+     * @returns {undefined}
      */
-    push(op: { type: 'put'; key: string; value: any }): void;
-    push(op: { type: 'del'; key: string }): void;
-    push(op: { type: 'put' | 'del'; key: string; value?: any }): void {
+    push(op) {
         if (this.closed) {
             throw propError(
                 'pushOnCommittedTransaction',
@ -100,42 +97,48 @@ export class IndexTransaction {
|
|||
* - pushOnCommittedTransaction if already committed
|
||||
* - missingKey if the key is missing from the op
|
||||
* - missingValue if putting without a value
|
||||
*
|
||||
* @returns {undefined}
|
||||
*
|
||||
* @see push
|
||||
*/
|
||||
put(key: string, value: any) {
|
||||
put(key, value) {
|
||||
this.push({ type: 'put', key, value });
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a new del operation to this running transaction
|
||||
*
|
||||
* @argument key - the key of the object to delete
|
||||
* @argument {string} key - the key of the object to delete
|
||||
*
|
||||
* @throws an error described by the following properties
|
||||
* @throws {Error} an error described by the following properties
|
||||
* - pushOnCommittedTransaction if already committed
|
||||
* - missingKey if the key is missing from the op
|
||||
*
|
||||
* @returns {undefined}
|
||||
*
|
||||
* @see push
|
||||
*/
|
||||
del(key: string) {
|
||||
del(key) {
|
||||
this.push({ type: 'del', key });
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a condition for the transaction
|
||||
*
|
||||
* @argument condition an object with the following attributes:
|
||||
* @argument {object} condition an object with the following attributes:
|
||||
* {
|
||||
* <condition>: the object key
|
||||
* }
|
||||
* example: { notExists: 'key1' }
|
||||
*
|
||||
* @throws an error described by the following properties
|
||||
* @throws {Error} an error described by the following properties
|
||||
* - pushOnCommittedTransaction if already committed
|
||||
* - missingCondition if the condition is empty
|
||||
*
|
||||
* @returns {undefined}
|
||||
*/
|
||||
addCondition(condition: { [key: string]: string }) {
|
||||
addCondition(condition) {
|
||||
if (this.closed) {
|
||||
throw propError(
|
||||
'pushOnCommittedTransaction',
|
||||
|
@ -148,7 +151,7 @@ export class IndexTransaction {
|
|||
'missing condition for conditional put'
|
||||
);
|
||||
}
|
||||
if (typeof condition.notExists !== 'string' && typeof condition.exists !== 'string') {
|
||||
if (typeof condition.notExists !== 'string') {
|
||||
throw propError(
|
||||
'unsupportedConditionalOperation',
|
||||
'missing key or supported condition'
|
||||
|
@ -160,11 +163,12 @@ export class IndexTransaction {
|
|||
/**
|
||||
* Applies the queued updates in this transaction atomically.
|
||||
*
|
||||
* @argument cb function to be called when the commit
|
||||
* @argument {function} cb function to be called when the commit
|
||||
* finishes, taking an optional error argument
|
||||
*
|
||||
* @returns {undefined}
|
||||
*/
|
||||
commit(cb: (error: Error | null, data?: any) => void) {
|
||||
commit(cb) {
|
||||
if (this.closed) {
|
||||
return cb(
|
||||
propError(
|
||||
|
@ -184,11 +188,11 @@ export class IndexTransaction {
|
|||
}
|
||||
|
||||
this.closed = true;
|
||||
const options = { sync: true, conditions: this.conditions };
|
||||
writeOptions.conditions = this.conditions;
|
||||
|
||||
// The array-of-operations variant of the `batch` method
|
||||
// allows passing options such has `sync: true` whereas the
|
||||
// chained form does not.
|
||||
return this.db.batch(this.operations, options, cb);
|
||||
return this.db.batch(this.operations, writeOptions, cb);
|
||||
}
|
||||
}
|
||||
|
|
|
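
A minimal usage sketch of the transaction API above; the module path and the `atomicRename` helper are illustrative, and `db` can be any LevelDB-like handle exposing `batch(ops, options, cb)`:

import { IndexTransaction } from './lib/db'; // path illustrative

function atomicRename(db: any, oldKey: string, newKey: string, value: string,
    cb: (err: Error | null) => void) {
    const tx = new IndexTransaction(db);
    tx.addCondition({ notExists: newKey }); // fail if the target already exists
    tx.put(newKey, value);
    tx.del(oldKey);
    // The put and the del are applied in a single batch, or not at all.
    tx.commit(err => cb(err ?? null));
}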

@@ -1,11 +1,9 @@
export interface ErrorLike {
    message: any;
    code: any;
    stack: any;
    name: any;
}

export function reshapeExceptionError(error: ErrorLike) {
export function reshapeExceptionError(error) {
    const { message, code, stack, name } = error;
    return { message, code, stack, name };
    return {
        message,
        code,
        stack,
        name,
    };
}
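
For context, a sketch of why this reshape exists: `Error` properties are non-enumerable, so serializing an exception directly loses everything. The import path here is illustrative:

import { reshapeExceptionError } from './lib/errorUtils'; // path illustrative

try {
    JSON.parse('not json');
} catch (err) {
    // JSON.stringify(err) on a raw Error yields '{}'.
    // Reshaping copies message/code/stack/name into a plain, loggable object.
    const plain = reshapeExceptionError(err as any);
    console.log(JSON.stringify(plain));
}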

@@ -13,7 +13,6 @@ export const AccessForbidden: ErrorFormat = {
    code: 403,
    description: 'Access Forbidden',
};

export const AccountProblem: ErrorFormat = {
    code: 403,
    description:
@@ -42,7 +41,7 @@ export const BucketAlreadyOwnedByYou: ErrorFormat = {
    code: 409,
    description:
        'A bucket with this name exists and is already owned by you',
        'Your previous request to create the named bucket succeeded and you already own it. You get this error in all AWS regions except US Standard, us-east-1. In us-east-1 region, you will get 200 OK, but it is no-op (if bucket exists S3 will not do anything).',
};

export const BucketNotEmpty: ErrorFormat = {
@@ -365,11 +364,6 @@ export const NoSuchWebsiteConfiguration: ErrorFormat = {
    description: 'The specified bucket does not have a website configuration',
};

export const NoSuchTagSet: ErrorFormat = {
    code: 404,
    description: 'The TagSet does not exist',
};

export const NoSuchUpload: ErrorFormat = {
    code: 404,
    description:
@@ -690,11 +684,6 @@ export const ReportNotPresent: ErrorFormat = {
    'The request was rejected because the credential report does not exist. To generate a credential report, use GenerateCredentialReport.',
};

export const Found: ErrorFormat = {
    code: 302,
    description: 'Resource Found'
};

// ------------- Special non-AWS S3 errors -------------

export const MPUinProgress: ErrorFormat = {
@@ -1042,15 +1031,3 @@ export const AuthMethodNotImplemented: ErrorFormat = {
    description: 'AuthMethodNotImplemented',
    code: 501,
};

// --------------------- quotaErrors ---------------------

export const NoSuchQuota: ErrorFormat = {
    code: 404,
    description: 'The specified resource does not have a quota.',
};

export const QuotaExceeded: ErrorFormat = {
    code: 429,
    description: 'The quota set for the resource is exceeded.',
};

@@ -1,39 +1,22 @@
import type { ServerResponse } from 'http';
import * as rawErrors from './arsenalErrors';
import * as http from 'http';
import * as rawErrors from './arsenal-errors';
import * as types from './types';

/** All possible errors names. */
export type Name = keyof typeof rawErrors;
/** Object containing all errors names. It has the format { [Name]: "Name" } */
export type Names = { [Name_ in Name]: Name_ };
/** Mapping used to determine an error type. It has the format { [Name]: boolean } */
export type Is = { [_ in Name]: boolean };
/** Mapping of all possible Errors. It has the format { [Name]: Error } */
export type Errors = { [_ in Name]: ArsenalError };
export * from './types';

// This object is reused constantly through createIs, we store it there
// to avoid recomputation.
const isBase = Object.fromEntries(
    Object.keys(rawErrors).map((key) => [key, false])
) as Is;
/** Mapping used to determine an error type. */
export type Is = { [Name in types.Name]: boolean };
/** Mapping of all possible Errors */
export type Errors = { [Property in keyof types.Names]: ArsenalError };

// This allows to conditionally add the old behavior of errors to properly
// test migration.
// Activate CI tests with `ALLOW_UNSAFE_ERROR_COMPARISON=false yarn test`.
// Remove this mechanism in ARSN-176.
export const allowUnsafeErrComp = (
    process.env.ALLOW_UNSAFE_ERROR_COMPARISON ?? 'true') === 'true'
const entries = Object.entries(rawErrors);

// This contains some metaprog. Be careful.
// Proxy can be found on MDN.
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Proxy
// While it could seem better to avoid metaprog, this allows us to enforce
// type-checking properly while avoiding all errors that could happen at runtime.
// Even if some errors are made in JavaScript, like using err.is.NonExistingError,
// the Proxy will return false.
const createIs = (type: Name): Is => {
    const get = (is: Is, value: string | symbol) => is[value] ?? false;
    const final = Object.freeze({ ...isBase, [type]: true });
    return new Proxy(final, { get });
const createIs = (type: types.Name) => {
    const get = (_: {}, value: string | symbol) => type === value;
    return new Proxy({}, { get }) as Is;
};

export class ArsenalError extends Error {
@@ -41,32 +24,18 @@ export class ArsenalError extends Error {
    #code: number;
    /** Text description of the error. */
    #description: string;
    /** Type of the error. */
    #type: Name;
    /** Type of the error. Belongs to errors.types. */
    #type: types.Name;
    /** Object used to determine the error type.
     * Example: error.is.InternalError */
    #is: Is;
    /** A map of error metadata (can be extra fields
     * that only show in debug mode) */
    #metadata: Map<string, Object[]>;

    private constructor(type: Name, code: number, description: string,
        metadata?: Map<string, Object[]>) {
    private constructor(type: types.Name, code: number, description: string) {
        super(type);
        this.#code = code;
        this.#description = description;
        this.#type = type;
        this.#is = createIs(type);
        this.#metadata = metadata ?? new Map<string, Object[]>();

        // This restores the old behavior of errors, to make sure they're now
        // backward-compatible. Fortunately it's handled by TS, but it cannot
        // be type-checked. This means we have to be extremely careful about
        // what we're doing when using errors.
        // Disables the feature when in CI tests but not in production.
        if (allowUnsafeErrComp) {
            this[type] = true;
        }
    }

    /** Output the error as a JSON string */
@@ -76,32 +45,8 @@ export class ArsenalError extends Error {
        return JSON.stringify({ errorType, errorMessage });
    }

    flatten() {
        return {
            is_arsenal_error: true,
            code: this.#code,
            description: this.#description,
            type: this.#type,
            stack: this.stack
        }
    }

    static unflatten(flat_obj) {
        if (!flat_obj.is_arsenal_error) {
            return null;
        }

        const err = new ArsenalError(
            flat_obj.type,
            flat_obj.code,
            flat_obj.description
        )
        err.stack = flat_obj.stack
        return err;
    }

    /** Write the error in an HTTP response */
    writeResponse(res: ServerResponse) {
    writeResponse(res: http.ServerResponse) {
        res.writeHead(this.#code);
        const asStr = this.toString();
        res.end(asStr);
@@ -111,65 +56,39 @@ export class ArsenalError extends Error {
    customizeDescription(description: string): ArsenalError {
        const type = this.#type;
        const code = this.#code;
        const metadata = new Map(this.#metadata);
        const err = new ArsenalError(type, code, description, metadata);
        err.stack = this.stack;
        return err;
        return new ArsenalError(type, code, description);
    }

    /** Clone the error with a new metadata field */
    addMetadataEntry(key: string, value: Object[]): ArsenalError {
        const type = this.#type;
        const code = this.#code;
        const description = this.#description;
        const metadata = new Map(this.#metadata);
        metadata.set(key, value);
        const err = new ArsenalError(type, code, description, metadata);
        err.stack = this.stack;
        return err;
    }

    /** Used to determine the error type. Example: error.is.InternalError */
    get is() {
        return this.#is;
    }

    /** HTTP status code. Example: 401, 403, 500, ... */
    get code() {
        return this.#code;
    }

    /** Text description of the error. */
    get description() {
        return this.#description;
    }

    /**
     * Type of the error, belonging to Name. is should be preferred over
     * type on a daily basis, but type remains accessible for future use. */
    get type() {
        return this.#type;
    }

    /** A map of error metadata */
    get metadata() {
        return this.#metadata;
    }

    /** Generate all possible errors. An instance is created by default. */
    static errors() {
        const errors = {};
        Object.entries(rawErrors).forEach((value) => {
            const name = value[0] as Name;
        return entries.reduce((acc, value) => {
            const name = value[0] as types.Name;
            const error = value[1];
            const { code, description } = error;
            const get = () => new ArsenalError(name, code, description);
            Object.defineProperty(errors, name, { get });
        });
        return errors as Errors;
            const err = new ArsenalError(name, code, description);
            return { ...acc, [name]: err };
        }, {} as Errors);
    }
}

/** Mapping of all possible Errors.
 * Use them with errors[error].customizeDescription for any customization. */
export default ArsenalError.errors();
const errors = ArsenalError.errors();

export default errors;
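
A short usage sketch of the error registry above; the import path is illustrative and `NoSuchKey` stands in for any entry of the raw error table:

import errors from './lib/errors'; // path illustrative

const err = errors.NoSuchKey.customizeDescription('key "photos/cat.jpg" does not exist');

console.log(err.code);              // 404
console.log(err.is.NoSuchKey);      // true: Proxy-backed type check
console.log(err.is.InternalError);  // false, even for names that do not exist
console.log(err.toString());        // '{"errorType":"NoSuchKey","errorMessage":...}'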
@@ -0,0 +1,11 @@
import * as rawErrors from './arsenal-errors';

const entries = Object.keys(rawErrors).map((v) => [v, v]);

/** All possible errors. */
export type Name = keyof typeof rawErrors
/** Object containing all errors. It has the format [Name]: "Name" */
export type Names = { [Name_ in Name]: Name_ };

/** Use types with error.is(types.InternalError) to have nice autocomplete */
export const types: Names = Object.fromEntries(entries);
@@ -3,37 +3,41 @@ import ipaddr from 'ipaddr.js';
/**
 * checkIPinRangeOrMatch checks whether a given ip address is in an ip address
 * range or matches the given ip address
 * @param cidr - ip address range or ip address
 * @param ip - parsed ip address
 * @return true if in range, false if not
 * @param {string} cidr - ip address range or ip address
 * @param {object} ip - parsed ip address
 * @return {boolean} true if in range, false if not
 */
export function checkIPinRangeOrMatch(
    cidr: string,
    ip: ipaddr.IPv4 | ipaddr.IPv6,
): boolean {
export function checkIPinRangeOrMatch(cidr, ip) {
    // If there is an exact match of the ip address, no need to check ranges
    if (ip.toString() === cidr) {
        return true;
    }
    let range;

    try {
        if (ip instanceof ipaddr.IPv6) {
            const range = ipaddr.IPv6.parseCIDR(cidr);
            return ip.match(range);
        } else {
            const range = ipaddr.IPv4.parseCIDR(cidr);
            return ip.match(range);
        range = ipaddr.IPv4.parseCIDR(cidr);
    } catch (err) {
        try {
            // not ipv4 so try ipv6
            range = ipaddr.IPv6.parseCIDR(cidr);
        } catch (err) {
            // range is not valid ipv4 or ipv6
            return false;
        }
    } catch (error) {
    }
    try {
        return ip.match(range);
    } catch (err) {
        return false;
    }
}

/**
 * Parse IP address into object representation
 * @param ip - IPV4/IPV6/IPV4-mapped IPV6 address
 * @return parsedIp - Object representation of parsed IP
 * @param {string} ip - IPV4/IPV6/IPV4-mapped IPV6 address
 * @return {object} parsedIp - Object representation of parsed IP
 */
export function parseIp(ip: string): ipaddr.IPv4 | ipaddr.IPv6 | {} {
export function parseIp(ip) {
    if (ipaddr.IPv4.isValid(ip)) {
        return ipaddr.parse(ip);
    }
@@ -41,19 +45,23 @@ export function parseIp(ip) {
        // also parses IPv6 mapped IPv4 addresses into IPv4 representation
        return ipaddr.process(ip);
    }
    // not valid ip address according to module, so return empty object
    // which will obviously not match a range of ip addresses that the parsedIp
    // is being tested against
    return {};
}

/**
 * Checks if an IP address matches a given list of CIDR ranges
 * @param cidrList - List of CIDR ranges
 * @param ip - IP address
 * @return - true if there is match or false for no match
 * @param {string[]} cidrList - List of CIDR ranges
 * @param {string} ip - IP address
 * @return {boolean} - true if there is match or false for no match
 */
export function ipMatchCidrList(cidrList: string[], ip: string): boolean {
export function ipMatchCidrList(cidrList, ip) {
    const parsedIp = parseIp(ip);
    return cidrList.some((item) => {
        let cidr: string | undefined;
    return cidrList.some(item => {
        let cidr;
        // patch the cidr if range is not specified
        if (item.indexOf('/') === -1) {
            if (item.startsWith('127.')) {
@@ -62,10 +70,6 @@ export function ipMatchCidrList(cidrList, ip) {
                cidr = `${item}/32`;
            }
        }
        return (
            (parsedIp instanceof ipaddr.IPv4 ||
                parsedIp instanceof ipaddr.IPv6) &&
            checkIPinRangeOrMatch(cidr || item, parsedIp)
        );
        return checkIPinRangeOrMatch(cidr || item, parsedIp);
    });
}
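
A minimal usage sketch of the CIDR matcher above (import path illustrative):

import { ipMatchCidrList } from './lib/ipCheck'; // path illustrative

// Entries without an explicit prefix are patched to /32;
// 127.* entries take the special-cased branch shown above.
const allowList = ['192.168.1.0/24', '10.0.0.5'];
console.log(ipMatchCidrList(allowList, '192.168.1.42'));    // true: in /24 range
console.log(ipMatchCidrList(allowList, '10.0.0.6'));        // false: /32 exact only
console.log(ipMatchCidrList(allowList, '::ffff:10.0.0.5')); // true: v4-mapped v6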
@@ -1,5 +1,5 @@
import * as util from 'util';
const debug = util.debuglog('jsutil');
import { debuglog } from 'util';
const debug = debuglog('jsutil');

// JavaScript utility functions

@@ -11,23 +11,21 @@ const debug = util.debuglog('jsutil');
 * @note underscore.js provides this functionality but not worth
 * adding a new dependency for such a small use case.
 *
 * @param func function to call at most once
 * @param {function} func function to call at most once
 * @return a callable wrapper mirroring <tt>func</tt> but
 * @return {function} a callable wrapper mirroring <tt>func</tt> but
 * only calls <tt>func</tt> at first invocation.
 */
export function once<T>(func: (...args: any[]) => T): (...args: any[]) => T {
    type State = { called: boolean; res: any };
    const state: State = { called: false, res: undefined };
    return function wrapper(...args: any[]) {
export function once(func) {
    const state = { called: false, res: undefined };
    return function wrapper(...args) {
        if (!state.called) {
            state.called = true;
            state.res = func.apply(func, args);
        } else {
            const m1 = 'function already called:';
            const m2 = 'returning cached result:';
            debug(m1, func, m2, state.res);
            debug('function already called:', func,
                'returning cached result:', state.res);
        }
        return state.res;
    };
}
};
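
A minimal usage sketch of `once` (import path illustrative; the debug line only prints when run with NODE_DEBUG=jsutil):

import { once } from './lib/jsutil'; // path illustrative

let connections = 0;
const connect = once(() => {
    connections += 1;
    return { id: connections };
});

const a = connect();
const b = connect(); // second call returns the cached result
console.log(a === b, connections); // true 1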
@@ -1,15 +1,19 @@
import Redis from 'ioredis';
import { Logger } from 'werelogs';

export type Config = { host: string; port: number; password: string };
export type Callback = (error: Error | null, value?: any) => void;

export default class RedisClient {
    _client: Redis.Redis;
    _client: Redis

    constructor(config: Config, logger: Logger) {
    /**
     * @constructor
     * @param {Object} config - config
     * @param {string} config.host - Redis host
     * @param {number} config.port - Redis port
     * @param {string} config.password - Redis password
     * @param {werelogs.Logger} logger - logger instance
     */
    constructor(config, logger) {
        this._client = new Redis(config);
        this._client.on('error', err =>
        this._client.on('error', (err) =>
            logger.trace('error from redis', {
                error: err,
                method: 'RedisClient.constructor',
@@ -22,16 +26,17 @@ export default class RedisClient {

    /**
     * scan a pattern and return matching keys
     * @param pattern - string pattern to match with all existing keys
     * @param [count=10] - scan count
     * @param cb - callback (error, result)
     * @param {string} pattern - string pattern to match with all existing keys
     * @param {number} [count=10] - scan count
     * @param {callback} cb - callback (error, result)
     * @return {undefined}
     */
    scan(pattern: string, count = 10, cb: Callback) {
    scan(pattern: string, count = 10, cb) {
        const params = { match: pattern, count };
        const keys: any[] = [];
        const keys = [];

        const stream = this._client.scanStream(params);
        stream.on('data', resultKeys => {
        stream.on('data', (resultKeys) => {
            for (let i = 0; i < resultKeys.length; i++) {
                keys.push(resultKeys[i]);
            }
@@ -41,91 +46,103 @@ export default class RedisClient {
        });
    }

    /** increment value of a key by 1 and set a ttl
     * @param key - key holding the value
     * @param expiry - expiry in seconds
     * @param cb - callback
    /**
     * increment value of a key by 1 and set a ttl
     * @param {string} key - key holding the value
     * @param {number} expiry - expiry in seconds
     * @param {callback} cb - callback
     * @return {undefined}
     */
    incrEx(key: string, expiry: number, cb: Callback) {
        const exp = expiry.toString();
    incrEx(key: string, expiry: number, cb) {
        return this._client
            .multi([['incr', key], ['expire', key, exp]])
            .multi([
                ['incr', key],
                ['expire', key, expiry],
            ])
            .exec(cb);
    }

    /**
     * increment value of a key by a given amount
     * @param key - key holding the value
     * @param amount - amount to increase by
     * @param cb - callback
     * @param {string} key - key holding the value
     * @param {number} amount - amount to increase by
     * @param {callback} cb - callback
     * @return {undefined}
     */
    incrby(key: string, amount: number, cb: Callback) {
    incrby(key: string, amount: number, cb) {
        return this._client.incrby(key, amount, cb);
    }

    /** increment value of a key by a given amount and set a ttl
     * @param key - key holding the value
     * @param amount - amount to increase by
     * @param expiry - expiry in seconds
     * @param cb - callback
    /**
     * increment value of a key by a given amount and set a ttl
     * @param {string} key - key holding the value
     * @param {number} amount - amount to increase by
     * @param {number} expiry - expiry in seconds
     * @param {callback} cb - callback
     * @return {undefined}
     */
    incrbyEx(key: string, amount: number, expiry: number, cb: Callback) {
        const am = amount.toString();
        const exp = expiry.toString();
    incrbyEx(key: string, amount: number, expiry: number, cb) {
        return this._client
            .multi([['incrby', key, am], ['expire', key, exp]])
            .multi([
                ['incrby', key, amount],
                ['expire', key, expiry],
            ])
            .exec(cb);
    }

    /**
     * decrement value of a key by a given amount
     * @param key - key holding the value
     * @param amount - amount to decrease by
     * @param cb - callback
     * @param {string} key - key holding the value
     * @param {number} amount - amount to decrease by
     * @param {callback} cb - callback
     * @return {undefined}
     */
    decrby(key: string, amount: number, cb: Callback) {
    decrby(key: string, amount: number, cb) {
        return this._client.decrby(key, amount, cb);
    }

    /**
     * execute a batch of commands
     * @param cmds - list of commands
     * @param cb - callback
     * @return
     * get value stored at key
     * @param {string} key - key holding the value
     * @param {callback} cb - callback
     * @return {undefined}
     */
    batch(cmds: string[][], cb: Callback) {
        return this._client.pipeline(cmds).exec(cb);
    get(key: string, cb) {
        return this._client.get(key, cb);
    }

    /**
     * Checks if a key exists
     * @param key - name of key
     * @param cb - callback
     * @param {string} key - name of key
     * @param {function} cb - callback
     * If cb response returns 0, key does not exist.
     * If cb response returns 1, key exists.
     * @return {undefined}
     */
    exists(key: string, cb: Callback) {
    exists(key: string, cb) {
        return this._client.exists(key, cb);
    }

    /**
     * get value stored at key
     * @param key - key holding the value
     * @param cb - callback
     * execute a batch of commands
     * @param {string[]} cmds - list of commands
     * @param {callback} cb - callback
     * @return {undefined}
     */
    get(key: string, cb: Callback) {
        return this._client.get(key, cb);
    batch(cmds: string[], cb) {
        return this._client.pipeline(cmds).exec(cb);
    }

    /**
     * Add a value and its score to a sorted set. If no sorted set exists, this
     * will create a new one for the given key.
     * @param key - name of key
     * @param score - score used to order set
     * @param value - value to store
     * @param cb - callback
     * @param {string} key - name of key
     * @param {integer} score - score used to order set
     * @param {string} value - value to store
     * @param {callback} cb - callback
     * @return {undefined}
     */
    zadd(key: string, score: number, value: string, cb: Callback) {
    zadd(key: string, score: number, value: string, cb) {
        return this._client.zadd(key, score, value, cb);
    }

@@ -134,10 +151,11 @@ export default class RedisClient {
     * Note: using this on a key that does not exist will return 0.
     * Note: using this on an existing key that isn't a sorted set will
     * return an error WRONGTYPE.
     * @param key - name of key
     * @param cb - callback
     * @param {string} key - name of key
     * @param {function} cb - callback
     * @return {undefined}
     */
    zcard(key: string, cb: Callback) {
    zcard(key: string, cb) {
        return this._client.zcard(key, cb);
    }

@@ -146,73 +164,73 @@ export default class RedisClient {
     * Note: using this on a key that does not exist will return nil.
     * Note: using this on a value that does not exist in a valid sorted set key
     * will return nil.
     * @param key - name of key
     * @param value - value within sorted set
     * @param cb - callback
     * @param {string} key - name of key
     * @param {string} value - value within sorted set
     * @param {function} cb - callback
     * @return {undefined}
     */
    zscore(key: string, value: string, cb: Callback) {
    zscore(key: string, value: string, cb) {
        return this._client.zscore(key, value, cb);
    }

    /**
     * Remove a value from a sorted set
     * @param key - name of key
     * @param value - value within sorted set. Can specify
     * @param {string} key - name of key
     * @param {string|array} value - value within sorted set. Can specify
     * multiple values within an array
     * @param cb - callback
     * @param {function} cb - callback
     * The cb response returns number of values removed
     * @return {undefined}
     */
    zrem(key: string, value: string | string[], cb: Callback) {
    zrem(key: string, value: string | any[], cb) {
        return this._client.zrem(key, value, cb);
    }

    /**
     * Get specified range of elements in a sorted set
     * @param key - name of key
     * @param start - start index (inclusive)
     * @param end - end index (inclusive) (can use -1)
     * @param cb - callback
     * @param {string} key - name of key
     * @param {integer} start - start index (inclusive)
     * @param {integer} end - end index (inclusive) (can use -1)
     * @param {function} cb - callback
     * @return {undefined}
     */
    zrange(key: string, start: number, end: number, cb: Callback) {
    zrange(key: string, start: number, end: number, cb) {
        return this._client.zrange(key, start, end, cb);
    }

    /**
     * Get range of elements in a sorted set based off score
     * @param key - name of key
     * @param min - min score value (inclusive)
     * @param {string} key - name of key
     * @param {integer|string} min - min score value (inclusive)
     * (can use "-inf")
     * @param max - max score value (inclusive)
     * @param {integer|string} max - max score value (inclusive)
     * (can use "+inf")
     * @param cb - callback
     * @param {function} cb - callback
     * @return {undefined}
     */
    zrangebyscore(
        key: string,
        min: number | string,
        max: number | string,
        cb: Callback,
    ) {
    zrangebyscore(key: string, min: number | string, max: number | string, cb) {
        return this._client.zrangebyscore(key, min, max, cb);
    }

    /**
     * get TTL or expiration in seconds
     * @param key - name of key
     * @param cb - callback
     * @param {string} key - name of key
     * @param {function} cb - callback
     * @return {undefined}
     */
    ttl(key: string, cb: Callback) {
    ttl(key: string, cb) {
        return this._client.ttl(key, cb);
    }

    clear(cb: Callback) {
    clear(cb) {
        return this._client.flushdb(cb);
    }

    disconnect() {
        this._client.disconnect();
    disconnect(cb) {
        return this._client.quit(cb);
    }

    listClients(cb: Callback) {
    listClients(cb) {
        return this._client.client('list', cb);
    }
}
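
A sketch of how `incrEx` supports a naive fixed-window rate limit; the paths, the `allowRequest` helper, and the connection parameters are illustrative:

import RedisClient from './lib/metrics/RedisClient'; // path illustrative
import { Logger } from 'werelogs';

const logger = new Logger('RateLimiter');
const redis = new RedisClient(
    { host: 'localhost', port: 6379, password: '' }, logger);

// INCR and EXPIRE run in a single MULTI, so the TTL is always
// attached to the counter it guards.
function allowRequest(clientId: string, limit: number,
    cb: (err: Error | null, allowed?: boolean) => void) {
    redis.incrEx(`ratelimit:${clientId}`, 60, (err, results) => {
        if (err) {
            return cb(err);
        }
        // results is the ioredis multi output: [[null, count], [null, 1]]
        const count = results[0][1];
        return cb(null, count <= limit);
    });
}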
@@ -1,19 +1,16 @@
import async from 'async';
import RedisClient from './RedisClient';
import { Logger } from 'werelogs';

export type Callback = (error: Error | null, value?: any) => void;

export default class StatsClient {
    _redis: RedisClient;
    _redis?: RedisClient;
    _interval: number;
    _expiry: number;

    /**
     * @constructor
     * @param redisClient - RedisClient instance
     * @param interval - sampling interval by seconds
     * @param expiry - sampling duration by seconds
     * @param {object} redisClient - RedisClient instance
     * @param {number} interval - sampling interval by seconds
     * @param {number} expiry - sampling duration by seconds
     */
    constructor(redisClient: RedisClient, interval: number, expiry: number) {
        this._redis = redisClient;
@@ -22,23 +19,25 @@ export default class StatsClient {
        return this;
    }

    /** Utility function to use when callback is undefined */
    /*
     * Utility function to use when callback is undefined
     */
    _noop() {}

    /**
     * normalize to the nearest interval
     * @param d - Date instance
     * @return timestamp - normalized to the nearest interval
     * @param {object} d - Date instance
     * @return {number} timestamp - normalized to the nearest interval
     */
    _normalizeTimestamp(d: Date): number {
        const s = d.getSeconds();
        return d.setSeconds(s - s % this._interval, 0);
        return d.setSeconds(s - (s % this._interval), 0);
    }

    /**
     * set timestamp to the previous interval
     * @param d - Date instance
     * @return timestamp - set to the previous interval
     * @param {object} d - Date instance
     * @return {number} timestamp - set to the previous interval
     */
    _setPrevInterval(d: Date): number {
        return d.setSeconds(d.getSeconds() - this._interval);
@@ -46,23 +45,23 @@ export default class StatsClient {

    /**
     * build redis key to get total number of occurrences on the server
     * @param name - key name identifier
     * @param d - Date instance
     * @return key - key for redis
     * @param {string} name - key name identifier
     * @param {Date} date - Date instance
     * @return {string} key - key for redis
     */
    buildKey(name: string, d: Date): string {
        return `${name}:${this._normalizeTimestamp(d)}`;
    buildKey(name: string, date: Date): string {
        return `${name}:${this._normalizeTimestamp(date)}`;
    }

    /**
     * reduce the array of values to a single value
     * typical input looks like [[null, '1'], [null, '2'], [null, null]...]
     * @param arr - list of batch command results
     * @return count - the summed count
     * @param {array} arr - list of batch command results
     * @return {number} count - the summed count
     */
    _getCount(arr: [any, string | null][]): number {
    _getCount(arr: any[]): number {
        return arr.reduce((prev, a) => {
            let num = parseInt(a[1] ?? '', 10);
            let num = parseInt(a[1], 10);
            num = Number.isNaN(num) ? 0 : num;
            return prev + num;
        }, 0);
@@ -70,27 +69,25 @@ export default class StatsClient {

    /**
     * report/record a new request received on the server
     * @param id - service identifier
     * @param incr - optional param increment
     * @param {string} id - service identifier
     * @param {number} incr - optional param increment
     * @param {function} cb - callback
     * @return {undefined}
     */
    reportNewRequest(
        id: string,
        incr?: number | ((error: Error | null, value?: any) => void),
        cb?: (error: Error | null, value?: any) => void,
    ) {
    reportNewRequest(id: string, incr: number, cb) {
        if (!this._redis) {
            return undefined;
        }

        let callback: (error: Error | null, value?: any) => void;
        let amount: number;
        let callback;
        let amount;
        if (typeof incr === 'function') {
            // In case where optional `incr` is not passed, but `cb` is passed
            callback = incr;
            amount = 1;
        } else {
            callback = (cb && typeof cb === 'function') ? cb : this._noop;
            amount = (typeof incr === 'number') ? incr : 1;
            callback = cb && typeof cb === 'function' ? cb : this._noop;
            amount = typeof incr === 'number' ? incr : 1;
        }

        const key = this.buildKey(`${id}:requests`, new Date());
@@ -100,31 +97,35 @@ export default class StatsClient {

    /**
     * Increment the given key by the given value.
     * @param key - The Redis key to increment
     * @param incr - The value to increment by
     * @param [cb] - callback
     * @param {String} key - The Redis key to increment
     * @param {Number} incr - The value to increment by
     * @param {function} [cb] - callback
     * @return {undefined}
     */
    incrementKey(key: string, incr: number, cb: Callback) {
    incrementKey(key: string, incr: number, cb) {
        const callback = cb || this._noop;
        return this._redis.incrby(key, incr, callback);
    }

    /**
     * Decrement the given key by the given value.
     * @param key - The Redis key to decrement
     * @param decr - The value to decrement by
     * @param [cb] - callback
     * @param {String} key - The Redis key to decrement
     * @param {Number} decr - The value to decrement by
     * @param {function} [cb] - callback
     * @return {undefined}
     */
    decrementKey(key: string, decr: number, cb: Callback) {
    decrementKey(key: string, decr: number, cb) {
        const callback = cb || this._noop;
        return this._redis.decrby(key, decr, callback);
    }

    /**
     * report/record a request that ended up being a 500 on the server
     * @param id - service identifier
     * @param {string} id - service identifier
     * @param {callback} cb - callback
     * @return {undefined}
     */
    report500(id: string, cb?: (error: Error | null, value?: any) => void) {
    report500(id: string, cb) {
        if (!this._redis) {
            return undefined;
        }
@@ -135,25 +136,29 @@ export default class StatsClient {

    /**
     * wrapper on `getStats` that handles a list of keys
     * @param log - Werelogs request logger
     * @param ids - service identifiers
     * @param cb - callback to call with the err/result
     * @param {object} log - Werelogs request logger
     * @param {array} ids - service identifiers
     * @param {callback} cb - callback to call with the err/result
     * @return {undefined}
     */
    getAllStats(log: Logger, ids: string[], cb: Callback) {
    getAllStats(log, ids: any[], cb) {
        if (!this._redis) {
            return cb(null, {});
        }

        const statsRes = {
            'requests': 0,
            requests: 0,
            '500s': 0,
            'sampleDuration': this._expiry,
            sampleDuration: this._expiry,
        };
        let requests = 0;
        let errors = 0;

        // for now set concurrency to default of 10
        return async.eachLimit(ids, 10, (id: string, done) => {
        return async.eachLimit(
            ids,
            10,
            (id, done) => {
                this.getStats(log, id, (err, res) => {
                    if (err) {
                        return done(err);
@@ -162,7 +167,8 @@ export default class StatsClient {
                    errors += res['500s'];
                    return done();
                });
        }, error => {
            },
            (error) => {
                if (error) {
                    log.error('error getting stats', {
                        error,
@@ -173,22 +179,25 @@ export default class StatsClient {
                statsRes.requests = requests;
                statsRes['500s'] = errors;
                return cb(null, statsRes);
            });
        }
        );
    }

    /**
     * get stats for the last x seconds, x being the sampling duration
     * @param log - Werelogs request logger
     * @param id - service identifier
     * @param {object} log - Werelogs request logger
     * @param {string} id - service identifier
     * @param {callback} cb - callback to call with the err/result
     * @return {undefined}
     */
    getStats(log: Logger, id: string, cb: (error: Error | null, value?: any) => void) {
    getStats(log, id: string, cb) {
        if (!this._redis) {
            return cb(null, {});
        }
        const d = new Date();
        const totalKeys = Math.floor(this._expiry / this._interval);
        const reqsKeys: ['get', string][] = [];
        const req500sKeys: ['get', string][] = [];
        const reqsKeys = [];
        const req500sKeys = [];
        for (let i = 0; i < totalKeys; i++) {
            reqsKeys.push(['get', this.buildKey(`${id}:requests`, d)]);
            req500sKeys.push(['get', this.buildKey(`${id}:500s`, d)]);
@@ -206,9 +215,9 @@ export default class StatsClient {
             * index 1 contains the result
             */
            const statsRes = {
                'requests': 0,
                requests: 0,
                '500s': 0,
                'sampleDuration': this._expiry,
                sampleDuration: this._expiry,
            };
            if (err) {
                log.error('error getting stats', {
@@ -222,8 +231,8 @@ export default class StatsClient {
                 */
                return cb(null, statsRes);
            }
            statsRes.requests = this._getCount((results as any)[0]);
            statsRes['500s'] = this._getCount((results as any)[1]);
            statsRes.requests = this._getCount(results[0]);
            statsRes['500s'] = this._getCount(results[1]);
            return cb(null, statsRes);
        });
    }
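
A minimal wiring sketch for the sampling client above; paths, the service id, and the interval/expiry values are illustrative:

import RedisClient from './lib/metrics/RedisClient';   // paths illustrative
import StatsClient from './lib/metrics/StatsClient';
import { Logger } from 'werelogs';

const log = new Logger('Stats');
const redis = new RedisClient(
    { host: 'localhost', port: 6379, password: '' }, log);
// 5-minute sampling duration, bucketed into 30-second intervals
const stats = new StatsClient(redis, 30, 300);

stats.reportNewRequest('s3', 1, () => {});
stats.getStats(log, 's3', (err, res) => {
    // res: { requests: <total>, '500s': <total>, sampleDuration: 300 }
    if (!err) {
        log.info('current stats', res);
    }
});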
@@ -1,8 +1,5 @@
import StatsClient from './StatsClient';
import { Logger } from 'werelogs';
import async from 'async';

export type Callback = (error: Error | null, value?: any) => void;
import StatsClient from './StatsClient';

/**
 * @class StatsModel
@@ -14,44 +11,43 @@ export default class StatsModel extends StatsClient {
    /**
     * Utility method to convert 2d array rows to columns, and vice versa
     * See also: https://docs.ruby-lang.org/en/2.0.0/Array.html#method-i-zip
     * @param arrays - 2d array of integers
     * @return converted array
     * @param {array} arrays - 2d array of integers
     * @return {array} converted array
     */
    _zip(arrays: number[][]) {
        if (arrays.length > 0 && arrays.every(a => Array.isArray(a))) {
            return arrays[0].map((_, i) => arrays.map(a => a[i]));
        if (arrays.length > 0 && arrays.every((a) => Array.isArray(a))) {
            return arrays[0].map((_, i) => arrays.map((a) => a[i]));
        }
        return [];
    }

    /**
     * normalize to the nearest interval
     * @param d - Date instance
     * @return timestamp - normalized to the nearest interval
     * @param {object} d - Date instance
     * @return {number} timestamp - normalized to the nearest interval
     */
    _normalizeTimestamp(d: Date) {
        const m = d.getMinutes();
        return d.setMinutes(m - m % (Math.floor(this._interval / 60)), 0, 0);
        return d.setMinutes(m - (m % Math.floor(this._interval / 60)), 0, 0);
    }

    /**
     * override the method to get the count as an array of integers separated
     * by each interval
     * typical input looks like [[null, '1'], [null, '2'], [null, null]...]
     * @param arr - each index contains the result of each batch command
     * @param {array} arr - each index contains the result of each batch command
     * where index 0 signifies the error and index 1 contains the result
     * @return array of integers, ordered from most recent interval to
     * @return {array} array of integers, ordered from most recent interval to
     * oldest interval with length of (expiry / interval)
     */
    // @ts-expect-error
    _getCount(arr: [any, string | null][]): number[] {
    _getCount(arr) {
        const size = Math.floor(this._expiry / this._interval);
        const array = arr.reduce((store, i) => {
            let num = parseInt(i[1] ?? '', 10);
            let num = parseInt(i[1], 10);
            num = Number.isNaN(num) ? 0 : num;
            store.push(num);
            return store;
        }, [] as number[]);
        }, []);

        if (array.length < size) {
            array.push(...Array(size - array.length).fill(0));
@@ -62,23 +58,24 @@ export default class StatsModel extends StatsClient {
    /**
     * wrapper on `getStats` that handles a list of keys
     * override the method to reduce the returned 2d array from `_getCount`
     * @param log - Werelogs request logger
     * @param ids - service identifiers
     * @param cb - callback to call with the err/result
     * @param {object} log - Werelogs request logger
     * @param {array} ids - service identifiers
     * @param {callback} cb - callback to call with the err/result
     * @return {undefined}
     */
    getAllStats(log: Logger, ids: string[], cb: Callback) {
    getAllStats(log, ids: string[], cb) {
        if (!this._redis) {
            return cb(null, {});
        }

        const size = Math.floor(this._expiry / this._interval);
        const statsRes = {
            'requests': Array(size).fill(0),
            requests: Array(size).fill(0),
            '500s': Array(size).fill(0),
            'sampleDuration': this._expiry,
            sampleDuration: this._expiry,
        };
        const requests: any[] = [];
        const errors: any[] = [];
        const requests = [];
        const errors = [];

        if (ids.length === 0) {
            return cb(null, statsRes);
@@ -114,13 +111,14 @@ export default class StatsModel extends StatsClient {

    /**
     * Handles getting a list of global keys.
     * @param ids - Service identifiers
     * @param log - Werelogs request logger
     * @param cb - Callback
     * @param {array} ids - Service identifiers
     * @param {object} log - Werelogs request logger
     * @param {function} cb - Callback
     * @return {undefined}
     */
    getAllGlobalStats(ids: string[], log: Logger, cb: Callback) {
        const reqsKeys = ids.map(key => (['get', key]));
        return this._redis.batch(reqsKeys, (err, res) => {
    getAllGlobalStats(ids: string[], log, cb) {
        const reqsKeys = ids.map((key) => ['get', key]);
        return this._redis!.batch(reqsKeys, (err, res) => {
            const statsRes = { requests: 0 };
            if (err) {
                log.error('error getting metrics', {
@@ -146,8 +144,8 @@ export default class StatsModel extends StatsClient {

    /**
     * normalize date timestamp to the nearest hour
     * @param d - Date instance
     * @return timestamp - normalized to the nearest hour
     * @param {Date} d - Date instance
     * @return {number} timestamp - normalized to the nearest hour
     */
    normalizeTimestampByHour(d: Date) {
        return d.setMinutes(0, 0, 0);
@@ -155,8 +153,8 @@ export default class StatsModel extends StatsClient {

    /**
     * get previous hour to date given
     * @param d - Date instance
     * @return timestamp - one hour prior to date passed
     * @param {Date} d - Date instance
     * @return {number} timestamp - one hour prior to date passed
     */
    _getDatePreviousHour(d: Date) {
        return d.setHours(d.getHours() - 1);
@@ -164,8 +162,8 @@ export default class StatsModel extends StatsClient {

    /**
     * get list of sorted set key timestamps
     * @param epoch - epoch time
     * @return array of sorted set key timestamps
     * @param {number} epoch - epoch time
     * @return {array} array of sorted set key timestamps
     */
    getSortedSetHours(epoch: number) {
        const timestamps: number[] = [];
@@ -179,8 +177,8 @@ export default class StatsModel extends StatsClient {

    /**
     * get the normalized hour timestamp for given epoch time
     * @param epoch - epoch time
     * @return normalized hour timestamp for given time
     * @param {number} epoch - epoch time
     * @return {string} normalized hour timestamp for given time
     */
    getSortedSetCurrentHour(epoch: number) {
        return this.normalizeTimestampByHour(new Date(epoch));
@@ -188,18 +186,14 @@ export default class StatsModel extends StatsClient {

    /**
     * helper method to add element to a sorted set, applying TTL if new set
     * @param key - name of key
     * @param score - score used to order set
     * @param value - value to store
     * @param cb - callback
     * @param {string} key - name of key
     * @param {integer} score - score used to order set
     * @param {string} value - value to store
     * @param {callback} cb - callback
     * @return {undefined}
     */
    addToSortedSet(
        key: string,
        score: number,
        value: string,
        cb: (error: Error | null, value?: any) => void,
    ) {
        this._redis.exists(key, (err, resCode) => {
    addToSortedSet(key: string, score: number, value: string, cb) {
        this._redis!.exists(key, (err, resCode) => {
            if (err) {
                return cb(err);
            }
@@ -208,17 +202,16 @@ export default class StatsModel extends StatsClient {
                const msInADay = 24 * 60 * 60 * 1000;
                const nearestHour = this.normalizeTimestampByHour(new Date());
                // in seconds
                const ttl = Math.ceil(
                    (msInADay - (Date.now() - nearestHour)) / 1000);
                const ttl = Math.ceil((msInADay - (Date.now() - nearestHour)) / 1000);
                const cmds = [
                    ['zadd', key, score.toString(), value],
                    ['expire', key, ttl.toString()],
                    ['zadd', key, score, value],
                    ['expire', key, ttl],
                ];
                return this._redis.batch(cmds, (err, res) => {
                    if (err) {
                        return cb(err);
                    }
                    const cmdErr = res.find((r: any) => r[0] !== null);
                    const cmdErr = res.find((r) => r[0] !== null);
                    if (cmdErr) {
                        return cb(cmdErr);
                    }
@@ -226,7 +219,7 @@ export default class StatsModel extends StatsClient {
                    return cb(null, successResponse);
                });
            }
            return this._redis.zadd(key, score, value, cb);
            return this._redis!.zadd(key, score, value, cb);
        });
    }
}
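
The `_zip` transposition above is the core of merging per-service series; a standalone sketch of the same transform:

// Transpose rows to columns, like Ruby's Array#zip.
function zip(arrays: number[][]): number[][] {
    if (arrays.length > 0 && arrays.every(a => Array.isArray(a))) {
        return arrays[0].map((_, i) => arrays.map(a => a[i]));
    }
    return [];
}

// Two services' per-interval request counts...
const perService = [[3, 0, 2], [1, 4, 0]];
// ...become per-interval tuples, ready to be summed into one series:
console.log(zip(perService)); // [[3, 1], [0, 4], [2, 0]]
const merged = zip(perService).map(col => col.reduce((a, b) => a + b, 0));
console.log(merged); // [4, 4, 2]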
@@ -1,35 +1,38 @@
import promClient from 'prom-client';

const collectDefaultMetricsIntervalMs =
    process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS !== undefined
        ? Number.parseInt(process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS, 10)
        : 10000;

promClient.collectDefaultMetrics({ timeout: collectDefaultMetricsIntervalMs });

export default class ZenkoMetrics {
    static createCounter(params: promClient.CounterConfiguration<string>) {
    static createCounter(params) {
        return new promClient.Counter(params);
    }

    static createGauge(params: promClient.GaugeConfiguration<string>) {
    static createGauge(params) {
        return new promClient.Gauge(params);
    }

    static createHistogram(params: promClient.HistogramConfiguration<string>) {
    static createHistogram(params) {
        return new promClient.Histogram(params);
    }

    static createSummary(params: promClient.SummaryConfiguration<string>) {
    static createSummary(params) {
        return new promClient.Summary(params);
    }

    static getMetric(name: string) {
    static getMetric(name) {
        return promClient.register.getSingleMetric(name);
    }

    static async asPrometheus() {
    static asPrometheus() {
        return promClient.register.metrics();
    }

    static asPrometheusContentType() {
        return promClient.register.contentType;
    }

    static collectDefaultMetrics() {
        return promClient.collectDefaultMetrics();
    }
}
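
A minimal exposure sketch, assuming the synchronous registry API used above (metric name, port, and import path are illustrative):

import * as http from 'http';
import ZenkoMetrics from './lib/metrics/ZenkoMetrics'; // path illustrative

const requestCounter = ZenkoMetrics.createCounter({
    name: 'cloudserver_http_requests_total',
    help: 'Total number of HTTP requests',
    labelNames: ['method'],
});

http.createServer((req, res) => {
    requestCounter.inc({ method: req.method ?? 'UNKNOWN' });
    if (req.url === '/metrics') {
        // Serve the text exposition format for a Prometheus scraper.
        res.setHeader('Content-Type', ZenkoMetrics.asPrometheusContentType());
        res.end(ZenkoMetrics.asPrometheus());
        return;
    }
    res.end('ok');
}).listen(8080);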
@@ -1,4 +1,4 @@
import errors from '../errors'
import errors from '../errors';

const validServices = {
    aws: ['s3', 'iam', 'sts', 'ring'],
@@ -9,7 +9,7 @@ export default class ARN {
    _partition: string;
    _service: string;
    _region: string | null;
    _accountId?: string | null;
    _accountId: string | null;
    _resource: string;

    /**
@@ -17,19 +17,13 @@ export default class ARN {
     * Create an ARN object from its individual components
     *
     * @constructor
     * @param partition - ARN partition (e.g. 'aws')
     * @param service - service name in partition (e.g. 's3')
     * @param [region] - AWS region
     * @param [accountId] - AWS 12-digit account ID
     * @param resource - AWS resource path (e.g. 'foo/bar')
     * @param {string} partition - ARN partition (e.g. 'aws')
     * @param {string} service - service name in partition (e.g. 's3')
     * @param {string} [region] - AWS region
     * @param {string} [accountId] - AWS 12-digit account ID
     * @param {string} resource - AWS resource path (e.g. 'foo/bar')
     */
    constructor(
        partition: string,
        service: string,
        region: string | undefined | null,
        accountId: string | undefined | null,
        resource: string,
    ) {
    constructor(partition: string, service: string, region: string, accountId: string, resource: string) {
        this._partition = partition;
        this._service = service;
        this._region = region || null;
@@ -69,8 +63,8 @@ export default class ARN {
                `bad ARN: bad account ID "${accountId}": ` +
                'must be a 12-digit number or "*"') };
        }
        const fullResource = (resource !== undefined ?
            `${resourceType}:${resource}` : resourceType);
        const fullResource = resource !== undefined ?
            `${resourceType}:${resource}` : resourceType;
        return new ARN(partition, service, region, accountId, fullResource);
    }

@@ -91,21 +85,21 @@ export default class ARN {
    }

    isIAMAccount() {
        return this.getService() === 'iam'
            && this.getAccountId() !== null
            && this.getAccountId() !== '*'
            && this.getResource() === 'root';
        return this.getService() === 'iam' &&
            this.getAccountId() !== null &&
            this.getAccountId() !== '*' &&
            this.getResource() === 'root';
    }
    isIAMUser() {
        return this.getService() === 'iam'
            && this.getAccountId() !== null
            && this.getAccountId() !== '*'
            && this.getResource().startsWith('user/');
        return this.getService() === 'iam' &&
            this.getAccountId() !== null &&
            this.getAccountId() !== '*' &&
            this.getResource().startsWith('user/');
    }
    isIAMRole() {
        return this.getService() === 'iam'
            && this.getAccountId() !== null
            && this.getResource().startsWith('role');
        return this.getService() === 'iam' &&
            this.getAccountId() !== null &&
            this.getResource().startsWith('role');
    }

    toString() {
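
A short usage sketch of the IAM checks above; the import path is illustrative, and the empty region string is normalized to null by the constructor shown in the diff:

import ARN from './lib/models/ARN'; // path illustrative

const user = new ARN('aws', 'iam', '', '123456789012', 'user/alice');
console.log(user.isIAMUser());    // true
console.log(user.isIAMAccount()); // false: resource is not 'root'
console.log(user.isIAMRole());    // false: resource does not start with 'role'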
@@ -1,36 +1,28 @@
-import { RequestLogger } from 'werelogs';
-
 import { legacyLocations } from '../constants';
 import escapeForXml from '../s3middleware/escapeForXml';

-type CloudServerConfig = any;
-
 export default class BackendInfo {
-    _config: CloudServerConfig;
-    _requestEndpoint: string;
-    _objectLocationConstraint?: string;
-    _bucketLocationConstraint?: string;
-    _legacyLocationConstraint?: string;
+    _config;
+    _objectLocationConstraint;
+    _bucketLocationConstraint;
+    _requestEndpoint;
+    _legacyLocationConstraint;

     /**
      * Represents the info necessary to evaluate which data backend to use
      * on a data put call.
      * @constructor
-     * @param config - CloudServer config containing list of locations
-     * @param objectLocationConstraint - location constraint
+     * @param {object} config - CloudServer config containing list of locations
+     * @param {string | undefined} objectLocationConstraint - location constraint
      * for object based on user meta header
-     * @param bucketLocationConstraint - location
+     * @param {string | undefined } bucketLocationConstraint - location
      * constraint for bucket based on bucket metadata
-     * @param requestEndpoint - endpoint to which request was made
-     * @param legacyLocationConstraint - legacy location constraint
+     * @param {string} requestEndpoint - endpoint to which request was made
+     * @param {string | undefined } legacyLocationConstraint - legacy location
+     * constraint
      */
-    constructor(
-        config: CloudServerConfig,
-        objectLocationConstraint: string | undefined,
-        bucketLocationConstraint: string | undefined,
-        requestEndpoint: string,
-        legacyLocationConstraint: string | undefined,
-    ) {
+    constructor(config, objectLocationConstraint, bucketLocationConstraint,
+        requestEndpoint, legacyLocationConstraint) {
         this._config = config;
         this._objectLocationConstraint = objectLocationConstraint;
         this._bucketLocationConstraint = bucketLocationConstraint;
@@ -41,18 +33,15 @@ export default class BackendInfo {

     /**
      * validate proposed location constraint against config
-     * @param config - CloudServer config
-     * @param locationConstraint - value of user
+     * @param {object} config - CloudServer config
+     * @param {string | undefined} locationConstraint - value of user
      * metadata location constraint header or bucket location constraint
-     * @param log - werelogs logger
-     * @return - true if valid, false if not
+     * @param {object} log - werelogs logger
+     * @return {boolean} - true if valid, false if not
      */
-    static isValidLocationConstraint(
-        config: CloudServerConfig,
-        locationConstraint: string | undefined,
-        log: RequestLogger,
-    ) {
-        if (!locationConstraint || !(locationConstraint in config.locationConstraints)) {
+    static isValidLocationConstraint(config, locationConstraint, log) {
+        if (Object.keys(config.locationConstraints).
+            indexOf(locationConstraint) < 0) {
             log.trace('proposed locationConstraint is invalid',
                 { locationConstraint });
             return false;
@@ -62,17 +51,14 @@ export default class BackendInfo {

     /**
      * validate that request endpoint is listed in the restEndpoint config
-     * @param config - CloudServer config
-     * @param requestEndpoint - request endpoint
-     * @param log - werelogs logger
-     * @return true if present, false if not
+     * @param {object} config - CloudServer config
+     * @param {string} requestEndpoint - request endpoint
+     * @param {object} log - werelogs logger
+     * @return {boolean} - true if present, false if not
      */
-    static isRequestEndpointPresent(
-        config: CloudServerConfig,
-        requestEndpoint: string,
-        log: RequestLogger,
-    ) {
-        if (!(requestEndpoint in config.restEndpoints)) {
+    static isRequestEndpointPresent(config, requestEndpoint, log) {
+        if (Object.keys(config.restEndpoints).
+            indexOf(requestEndpoint) < 0) {
             log.trace('requestEndpoint does not match config restEndpoints',
                 { requestEndpoint });
             return false;
@@ -83,18 +69,14 @@ export default class BackendInfo {
     /**
      * validate that locationConstraint for request Endpoint matches
      * one config locationConstraint
-     * @param config - CloudServer config
-     * @param requestEndpoint - request endpoint
-     * @param log - werelogs logger
-     * @return - true if matches, false if not
+     * @param {object} config - CloudServer config
+     * @param {string} requestEndpoint - request endpoint
+     * @param {object} log - werelogs logger
+     * @return {boolean} - true if matches, false if not
      */
-    static isRequestEndpointValueValid(
-        config: CloudServerConfig,
-        requestEndpoint: string,
-        log: RequestLogger,
-    ) {
-        const restEndpoint = config.restEndpoints[requestEndpoint];
-        if (!(restEndpoint in config.locationConstraints)) {
+    static isRequestEndpointValueValid(config, requestEndpoint, log) {
+        if (Object.keys(config.locationConstraints).
+            indexOf(config.restEndpoints[requestEndpoint]) < 0) {
             log.trace('the default locationConstraint for request' +
                 'Endpoint does not match any config locationConstraint',
                 { requestEndpoint });
@@ -105,11 +87,11 @@ export default class BackendInfo {

     /**
      * validate that s3 server is running with a file or memory backend
-     * @param config - CloudServer config
-     * @param log - werelogs logger
-     * @return - true if running with file/mem backend, false if not
+     * @param {object} config - CloudServer config
+     * @param {object} log - werelogs logger
+     * @return {boolean} - true if running with file/mem backend, false if not
      */
-    static isMemOrFileBackend(config: CloudServerConfig, log: RequestLogger) {
+    static isMemOrFileBackend(config, log) {
         if (config.backends.data === 'mem' || config.backends.data === 'file') {
             log.trace('use data backend for the location', {
                 dataBackend: config.backends.data,
@@ -127,16 +109,12 @@ export default class BackendInfo {
      * data backend for the location.
      * - if locationConstraint for request Endpoint does not match
      * any config locationConstraint, we will return an error
-     * @param config - CloudServer config
-     * @param requestEndpoint - request endpoint
-     * @param log - werelogs logger
-     * @return - true if valid, false if not
+     * @param {object} config - CloudServer config
+     * @param {string} requestEndpoint - request endpoint
+     * @param {object} log - werelogs logger
+     * @return {boolean} - true if valid, false if not
      */
-    static isValidRequestEndpointOrBackend(
-        config: CloudServerConfig,
-        requestEndpoint: string,
-        log: RequestLogger,
-    ) {
+    static isValidRequestEndpointOrBackend(config, requestEndpoint, log) {
         if (!BackendInfo.isRequestEndpointPresent(config, requestEndpoint,
             log)) {
             return BackendInfo.isMemOrFileBackend(config, log);
@@ -147,22 +125,17 @@ export default class BackendInfo {

     /**
      * validate controlling BackendInfo Parameter
-     * @param config - CloudServer config
-     * @param objectLocationConstraint - value of user
+     * @param {object} config - CloudServer config
+     * @param {string | undefined} objectLocationConstraint - value of user
      * metadata location constraint header
-     * @param bucketLocationConstraint - location
+     * @param {string | null} bucketLocationConstraint - location
      * constraint from bucket metadata
-     * @param requestEndpoint - endpoint of request
-     * @param log - werelogs logger
-     * @return - location constraint validity
+     * @param {string} requestEndpoint - endpoint of request
+     * @param {object} log - werelogs logger
+     * @return {object} - location constraint validity
      */
-    static controllingBackendParam(
-        config: CloudServerConfig,
-        objectLocationConstraint: string | undefined,
-        bucketLocationConstraint: string | null,
-        requestEndpoint: string,
-        log: RequestLogger,
-    ) {
+    static controllingBackendParam(config, objectLocationConstraint,
+        bucketLocationConstraint, requestEndpoint, log) {
         if (objectLocationConstraint) {
             if (BackendInfo.isValidLocationConstraint(config,
                 objectLocationConstraint, log)) {
@@ -208,16 +181,16 @@ export default class BackendInfo {

     /**
      * Return legacyLocationConstraint
-     * @param config CloudServer config
-     * @return legacyLocationConstraint;
+     * @param {object} config CloudServer config
+     * @return {string | undefined} legacyLocationConstraint;
      */
-    static getLegacyLocationConstraint(config: CloudServerConfig) {
+    static getLegacyLocationConstraint(config) {
         return legacyLocations.find(ll => config.locationConstraints[ll]);
     }

     /**
      * Return objectLocationConstraint
-     * @return objectLocationConstraint;
+     * @return {string | undefined} objectLocationConstraint;
      */
     getObjectLocationConstraint() {
         return this._objectLocationConstraint;
@@ -225,7 +198,7 @@ export default class BackendInfo {

     /**
      * Return bucketLocationConstraint
-     * @return bucketLocationConstraint;
+     * @return {string | undefined} bucketLocationConstraint;
      */
     getBucketLocationConstraint() {
         return this._bucketLocationConstraint;
@@ -233,7 +206,7 @@ export default class BackendInfo {

     /**
      * Return requestEndpoint
-     * @return requestEndpoint;
+     * @return {string} requestEndpoint;
      */
     getRequestEndpoint() {
         return this._requestEndpoint;
@@ -248,9 +221,9 @@ export default class BackendInfo {
      * (4) default locationConstraint for requestEndpoint if requestEndpoint
      * is listed in restEndpoints in config.json
      * (5) default data backend
-     * @return locationConstraint;
+     * @return {string} locationConstraint;
      */
-    getControllingLocationConstraint(): string {
+    getControllingLocationConstraint() {
         const objectLC = this.getObjectLocationConstraint();
         const bucketLC = this.getBucketLocationConstraint();
         const reqEndpoint = this.getRequestEndpoint();
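A minimal usage sketch of the precedence these helpers implement, with a hypothetical CloudServer config (the `locationConstraints`, `restEndpoints`, and `backends.data` fields are the ones the code above reads; all values here are made up):

    import BackendInfo from './BackendInfo';

    const config = { // hypothetical CloudServer config
        locationConstraints: { 'us-east-1': {}, 'azure-loc': {} },
        restEndpoints: { 's3.example.com': 'us-east-1' },
        backends: { data: 'file' },
    };
    // The object-level constraint outranks the bucket-level one and the
    // endpoint default, per the priority list documented above.
    const info = new BackendInfo(config, 'azure-loc', 'us-east-1',
        's3.example.com', undefined);
    info.getControllingLocationConstraint(); // => 'azure-loc'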
@@ -1,86 +1,42 @@
-export type DeleteRetentionPolicy = {
-    enabled: boolean;
-    days: number;
-};
-
 /**
  * Helper class to ease access to the Azure specific information for
  * storage accounts mapped to buckets.
  */
 export default class BucketAzureInfo {
-    _data: {
-        sku: string;
-        accessTier: string;
-        kind: string;
-        systemKeys: string[];
-        tenantKeys: string[];
-        subscriptionId: string;
-        resourceGroup: string;
-        deleteRetentionPolicy: DeleteRetentionPolicy;
-        managementPolicies: any[];
-        httpsOnly: boolean;
-        tags: any;
-        networkACL: any[];
-        cname: string;
-        azureFilesAADIntegration: boolean;
-        hnsEnabled: boolean;
-        logging: any;
-        hourMetrics: any;
-        minuteMetrics: any;
-        serviceVersion: string;
-    }
+    _data

     /**
      * @constructor
-     * @param obj - Raw structure for the Azure info on storage account
-     * @param obj.sku - SKU name of this storage account
-     * @param obj.accessTier - Access Tier name of this storage account
-     * @param obj.kind - Kind name of this storage account
-     * @param obj.systemKeys - pair of shared keys for the system
-     * @param obj.tenantKeys - pair of shared keys for the tenant
-     * @param obj.subscriptionId - subscription ID the storage account
+     * @param {object} obj - Raw structure for the Azure info on storage account
+     * @param {string} obj.sku - SKU name of this storage account
+     * @param {string} obj.accessTier - Access Tier name of this storage account
+     * @param {string} obj.kind - Kind name of this storage account
+     * @param {string[]} obj.systemKeys - pair of shared keys for the system
+     * @param {string[]} obj.tenantKeys - pair of shared keys for the tenant
+     * @param {string} obj.subscriptionId - subscription ID the storage account
      * belongs to
-     * @param obj.resourceGroup - Resource group name the storage
+     * @param {string} obj.resourceGroup - Resource group name the storage
      * account belongs to
-     * @param obj.deleteRetentionPolicy - Delete retention policy
-     * @param obj.deleteRetentionPolicy.enabled -
-     * @param obj.deleteRetentionPolicy.days -
-     * @param obj.managementPolicies - Management policies for this
+     * @param {object} obj.deleteRetentionPolicy - Delete retention policy
+     * @param {boolean} obj.deleteRetentionPolicy.enabled -
+     * @param {number} obj.deleteRetentionPolicy.days -
+     * @param {object[]} obj.managementPolicies - Management policies for this
      * storage account
-     * @param obj.httpsOnly - Server the content of this storage
+     * @param {boolean} obj.httpsOnly - Server the content of this storage
      * account through HTTPS only
-     * @param obj.tags - Set of tags applied on this storage account
-     * @param obj.networkACL - Network ACL of this storage account
-     * @param obj.cname - CNAME of this storage account
-     * @param obj.azureFilesAADIntegration - whether or not Azure
+     * @param {object} obj.tags - Set of tags applied on this storage account
+     * @param {object[]} obj.networkACL - Network ACL of this storage account
+     * @param {string} obj.cname - CNAME of this storage account
+     * @param {boolean} obj.azureFilesAADIntegration - whether or not Azure
      * Files AAD Integration is enabled for this storage account
-     * @param obj.hnsEnabled - whether or not a hierarchical namespace
+     * @param {boolean} obj.hnsEnabled - whether or not a hierarchical namespace
      * is enabled for this storage account
-     * @param obj.logging - service properties: logging
-     * @param obj.hourMetrics - service properties: hourMetrics
-     * @param obj.minuteMetrics - service properties: minuteMetrics
-     * @param obj.serviceVersion - service properties: serviceVersion
+     * @param {object} obj.logging - service properties: logging
+     * @param {object} obj.hourMetrics - service properties: hourMetrics
+     * @param {object} obj.minuteMetrics - service properties: minuteMetrics
+     * @param {string} obj.serviceVersion - service properties: serviceVersion
      */
-    constructor(obj: {
-        sku: string;
-        accessTier: string;
-        kind: string;
-        systemKeys: string[];
-        tenantKeys: string[];
-        subscriptionId: string;
-        resourceGroup: string;
-        deleteRetentionPolicy: DeleteRetentionPolicy;
-        managementPolicies: any[];
-        httpsOnly: boolean;
-        tags: any;
-        networkACL: any[];
-        cname: string;
-        azureFilesAADIntegration: boolean;
-        hnsEnabled: boolean;
-        logging: any;
-        hourMetrics: any;
-        minuteMetrics: any;
-        serviceVersion: string;
-    }) {
+    constructor(obj) {
         this._data = {
             sku: obj.sku,
             accessTier: obj.accessTier,
@@ -108,7 +64,7 @@ export default class BucketAzureInfo {
         return this._data.sku;
     }

-    setSku(sku: string) {
+    setSku(sku) {
         this._data.sku = sku;
         return this;
     }
@@ -117,7 +73,7 @@ export default class BucketAzureInfo {
         return this._data.accessTier;
     }

-    setAccessTier(accessTier: string) {
+    setAccessTier(accessTier) {
         this._data.accessTier = accessTier;
         return this;
     }
@@ -126,7 +82,7 @@ export default class BucketAzureInfo {
         return this._data.kind;
     }

-    setKind(kind: string) {
+    setKind(kind) {
         this._data.kind = kind;
         return this;
     }
@@ -135,7 +91,7 @@ export default class BucketAzureInfo {
         return this._data.systemKeys;
     }

-    setSystemKeys(systemKeys: string[]) {
+    setSystemKeys(systemKeys) {
         this._data.systemKeys = systemKeys;
         return this;
     }
@@ -144,7 +100,7 @@ export default class BucketAzureInfo {
         return this._data.tenantKeys;
     }

-    setTenantKeys(tenantKeys: string[]) {
+    setTenantKeys(tenantKeys) {
         this._data.tenantKeys = tenantKeys;
         return this;
     }
@@ -153,7 +109,7 @@ export default class BucketAzureInfo {
         return this._data.subscriptionId;
     }

-    setSubscriptionId(subscriptionId: string) {
+    setSubscriptionId(subscriptionId) {
         this._data.subscriptionId = subscriptionId;
         return this;
     }
@@ -162,7 +118,7 @@ export default class BucketAzureInfo {
         return this._data.resourceGroup;
     }

-    setResourceGroup(resourceGroup: string) {
+    setResourceGroup(resourceGroup) {
         this._data.resourceGroup = resourceGroup;
         return this;
     }
@@ -171,7 +127,7 @@ export default class BucketAzureInfo {
         return this._data.deleteRetentionPolicy;
     }

-    setDeleteRetentionPolicy(deleteRetentionPolicy: DeleteRetentionPolicy) {
+    setDeleteRetentionPolicy(deleteRetentionPolicy) {
         this._data.deleteRetentionPolicy = deleteRetentionPolicy;
         return this;
     }
@@ -180,7 +136,7 @@ export default class BucketAzureInfo {
         return this._data.managementPolicies;
     }

-    setManagementPolicies(managementPolicies: any[]) {
+    setManagementPolicies(managementPolicies) {
         this._data.managementPolicies = managementPolicies;
         return this;
     }
@@ -189,7 +145,7 @@ export default class BucketAzureInfo {
         return this._data.httpsOnly;
     }

-    setHttpsOnly(httpsOnly: boolean) {
+    setHttpsOnly(httpsOnly) {
         this._data.httpsOnly = httpsOnly;
         return this;
     }
@@ -198,7 +154,7 @@ export default class BucketAzureInfo {
         return this._data.tags;
     }

-    setTags(tags: any) {
+    setTags(tags) {
         this._data.tags = tags;
         return this;
     }
@@ -207,7 +163,7 @@ export default class BucketAzureInfo {
         return this._data.networkACL;
     }

-    setNetworkACL(networkACL: any[]) {
+    setNetworkACL(networkACL) {
         this._data.networkACL = networkACL;
         return this;
     }
@@ -216,7 +172,7 @@ export default class BucketAzureInfo {
         return this._data.cname;
     }

-    setCname(cname: string) {
+    setCname(cname) {
         this._data.cname = cname;
         return this;
     }
@@ -225,7 +181,7 @@ export default class BucketAzureInfo {
         return this._data.azureFilesAADIntegration;
     }

-    setAzureFilesAADIntegration(azureFilesAADIntegration: boolean) {
+    setAzureFilesAADIntegration(azureFilesAADIntegration) {
         this._data.azureFilesAADIntegration = azureFilesAADIntegration;
         return this;
     }
@@ -234,7 +190,7 @@ export default class BucketAzureInfo {
         return this._data.hnsEnabled;
     }

-    setHnsEnabled(hnsEnabled: boolean) {
+    setHnsEnabled(hnsEnabled) {
         this._data.hnsEnabled = hnsEnabled;
         return this;
     }
@@ -243,7 +199,7 @@ export default class BucketAzureInfo {
         return this._data.logging;
     }

-    setLogging(logging: any) {
+    setLogging(logging) {
         this._data.logging = logging;
         return this;
     }
@@ -252,7 +208,7 @@ export default class BucketAzureInfo {
         return this._data.hourMetrics;
     }

-    setHourMetrics(hourMetrics: any) {
+    setHourMetrics(hourMetrics) {
         this._data.hourMetrics = hourMetrics;
         return this;
     }
@@ -261,7 +217,7 @@ export default class BucketAzureInfo {
         return this._data.minuteMetrics;
     }

-    setMinuteMetrics(minuteMetrics: any) {
+    setMinuteMetrics(minuteMetrics) {
         this._data.minuteMetrics = minuteMetrics;
         return this;
     }
@@ -270,7 +226,7 @@ export default class BucketAzureInfo {
         return this._data.serviceVersion;
     }

-    setServiceVersion(serviceVersion: any) {
+    setServiceVersion(serviceVersion) {
         this._data.serviceVersion = serviceVersion;
         return this;
     }
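Every setter above returns `this`, so updates chain. A sketch with illustrative values (the raw input object must carry the fields listed in the constructor JSDoc):

    const azureInfo = new BucketAzureInfo(rawStorageAccountInfo) // hypothetical input
        .setSku('Standard_LRS')
        .setAccessTier('Hot')
        .setHttpsOnly(true);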
File diff suppressed because it is too large
@@ -1,5 +1,6 @@
 import assert from 'assert';
-import errors, { ArsenalError } from '../errors';
+
+import errors from '../errors';
 import { validateResourcePolicy } from '../policy/policyValidator';

 /**
@@ -49,21 +50,22 @@ const objectActions = [
 ];

 export default class BucketPolicy {
-    _json: string;
-    _policy: any;
+    _json
+    _policy

     /**
      * Create a Bucket Policy instance
-     * @param json - the json policy
-     * @return - BucketPolicy instance
+     * @param {string} json - the json policy
+     * @return {object} - BucketPolicy instance
      */
-    constructor(json: string) {
+    constructor(json) {
         this._json = json;
         this._policy = {};
     }

     /**
      * Get the bucket policy
-     * @return - the bucket policy or error
+     * @return {object} - the bucket policy or error
      */
     getBucketPolicy() {
         const policy = this._getPolicy();
@@ -72,12 +74,15 @@ export default class BucketPolicy {

     /**
      * Get the bucket policy array
-     * @return - contains error if policy validation fails
+     * @return {object} - contains error if policy validation fails
      */
-    _getPolicy(): { error: ArsenalError } | any {
+    _getPolicy() {
         if (!this._json || this._json === '') {
-            return { error: errors.MalformedPolicy.customizeDescription(
-                'request json is empty or undefined') };
+            return {
+                error: errors.MalformedPolicy.customizeDescription(
+                    'request json is empty or undefined'
+                ),
+            };
         }
         const validSchema = validateResourcePolicy(this._json);
         if (validSchema.error) {
@@ -102,37 +107,45 @@ export default class BucketPolicy {

     /**
      * Validate action and resource are compatible
-     * @return - contains error or empty obj
+     * @return {error} - contains error or empty obj
      */
-    _validateActionResource(): { error?: ArsenalError } {
-        const invalid = this._policy.Statement.every((s: any) => {
-            const actions: string[] = typeof s.Action === 'string' ?
-                [s.Action] : s.Action;
-            const resources: string[] = typeof s.Resource === 'string' ?
-                [s.Resource] : s.Resource;
-            const objectAction = actions.some(a =>
-                a.includes('Object') || objectActions.includes(a));
+    _validateActionResource() {
+        const invalid = this._policy.Statement.every((s) => {
+            const actions =
+                typeof s.Action === 'string' ? [s.Action] : s.Action;
+            const resources =
+                typeof s.Resource === 'string' ? [s.Resource] : s.Resource;
+            const objectAction = actions.some(
+                (a) => a.includes('Object') || objectActions.includes(a)
+            );
             // wildcardObjectAction checks for actions such as 's3:*' or
             // 's3:Put*' but will return false for actions such as
             // 's3:PutBucket*'
             const wildcardObjectAction = actions.some(
-                a => a.includes('*') && !a.includes('Bucket'));
-            const objectResource = resources.some(r => r.includes('/'));
-            return ((objectAction && !objectResource) ||
-                (objectResource && !objectAction && !wildcardObjectAction));
+                (a) => a.includes('*') && !a.includes('Bucket')
+            );
+            const objectResource = resources.some((r) => r.includes('/'));
+            return (
+                (objectAction && !objectResource) ||
+                (objectResource && !objectAction && !wildcardObjectAction)
+            );
         });
         if (invalid) {
-            return { error: errors.MalformedPolicy.customizeDescription(
-                'Action does not apply to any resource(s) in statement') };
+            return {
+                error: errors.MalformedPolicy.customizeDescription(
+                    'Action does not apply to any resource(s) in statement'
+                ),
+            };
         }
         return {};
     }

     /**
      * Call resource policy schema validation function
-     * @param policy - the bucket policy object to validate
+     * @param {object} policy - the bucket policy object to validate
+     * @return {undefined}
      */
-    static validatePolicy(policy: any) {
+    static validatePolicy(policy) {
         // only the BucketInfo constructor calls this function
         // and BucketInfo will always be passed an object
         const validated = validateResourcePolicy(JSON.stringify(policy));
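A sketch of the compatibility check above: an object-level action paired with a bucket-level resource makes `_validateActionResource` flag the statement, so validation reports a MalformedPolicy error instead of throwing (this assumes `getBucketPolicy` runs the full validation chain, which is not shown in this hunk; the policy values are illustrative):

    const json = JSON.stringify({
        Version: '2012-10-17',
        Statement: [{
            Effect: 'Allow',
            Principal: '*',
            Action: 's3:GetObject',            // object action...
            Resource: 'arn:aws:s3:::mybucket', // ...on a bucket resource
        }],
    });
    const result = new BucketPolicy(json).getBucketPolicy();
    // result.error: 'Action does not apply to any resource(s) in statement'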
File diff suppressed because it is too large
@@ -1,37 +1,12 @@
 import uuid from 'uuid/v4';

-export type Status = 'Disabled' | 'Enabled';
-export type Tag = { Key: string; Value: string };
-export type Tags = Tag[];
-export type And = { Prefix?: string; Tags: Tags };
-export type Filter = { Prefix?: string; Tag?: Tag } | { And: And };
-export type Expiration = {
-    ExpiredObjectDeleteMarker?: number | boolean;
-    Date?: number | boolean;
-    Days?: number | boolean;
-};
-export type NoncurrentExpiration = {
-    NoncurrentDays: number | null;
-    NewerNoncurrentVersions: number | null;
-};
-
 /**
  * @class LifecycleRule
  *
  * @classdesc Simple get/set class to build a single Rule
  */
 export default class LifecycleRule {
-    id: string;
-    status: Status;
-    tags: Tags;
-    expiration?: Expiration;
-    ncvExpiration?: NoncurrentExpiration;
-    abortMPU?: { DaysAfterInitiation: number };
-    transitions?: any[];
-    ncvTransitions?: any[];
-    prefix?: string;
-
-    constructor(id: string, status: Status) {
+    constructor(id, status) {
         // defaults
         this.id = id || uuid();
         this.status = status === 'Disabled' ? 'Disabled' : 'Enabled';
@@ -39,23 +14,16 @@ export default class LifecycleRule {
     }

     build() {
-        const rule: {
-            ID: string;
-            Status: Status;
-            Expiration?: Expiration;
-            NoncurrentVersionExpiration?: NoncurrentExpiration;
-            AbortIncompleteMultipartUpload?: { DaysAfterInitiation: number };
-            Transitions?: any[];
-            NoncurrentVersionTransitions?: any[];
-            Filter?: Filter;
-            Prefix?: '';
-        } = { ID: this.id, Status: this.status };
+        const rule = {};
+
+        rule.ID = this.id;
+        rule.Status = this.status;

         if (this.expiration) {
             rule.Expiration = this.expiration;
         }
         if (this.ncvExpiration) {
-            rule.NoncurrentVersionExpiration = this.ncvExpiration
+            rule.NoncurrentVersionExpiration = this.ncvExpiration;
         }
         if (this.abortMPU) {
             rule.AbortIncompleteMultipartUpload = this.abortMPU;
@@ -63,11 +31,26 @@ export default class LifecycleRule {
         if (this.transitions) {
             rule.Transitions = this.transitions;
         }
-        if (this.ncvTransitions) {
-            rule.NoncurrentVersionTransitions = this.ncvTransitions;
-        }

-        const filter = this.buildFilter();
+        const filter = {};
+        if ((this.prefix && this.tags.length) || (this.tags.length > 1)) {
+            // And rule
+            const andRule = {};
+
+            if (this.prefix) {
+                andRule.Prefix = this.prefix;
+            }
+            andRule.Tags = this.tags;
+            filter.And = andRule;
+        } else {
+            if (this.prefix) {
+                filter.Prefix = this.prefix;
+            }
+            if (this.tags.length) {
+                filter.Tag = this.tags[0];
+            }
+        }

         if (Object.keys(filter).length > 0) {
             rule.Filter = filter;
@@ -78,27 +61,7 @@ export default class LifecycleRule {
         return rule;
     }

-    buildFilter() {
-        if ((this.prefix && this.tags.length) || this.tags.length > 1) {
-            // And rule
-            const And: And = { Tags: this.tags };
-            if (this.prefix) {
-                And.Prefix = this.prefix;
-            }
-            return { And };
-        } else {
-            const filter: Filter = {};
-            if (this.prefix) {
-                filter.Prefix = this.prefix;
-            }
-            if (this.tags.length > 0) {
-                filter.Tag = this.tags[0];
-            }
-            return filter;
-        }
-    }
-
-    addID(id: string) {
+    addID(id) {
         this.id = id;
         return this;
     }
@@ -108,12 +71,12 @@ export default class LifecycleRule {
         return this;
     }

-    addPrefix(prefix: string) {
+    addPrefix(prefix) {
         this.prefix = prefix;
         return this;
     }

-    addTag(key: string, value: string) {
+    addTag(key, value) {
         this.tags.push({
             Key: key,
             Value: value,
@@ -123,18 +86,16 @@ export default class LifecycleRule {

     /**
      * Expiration
-     * @param prop - Property must be defined in `validProps`
-     * @param value - integer for `Date` or `Days`, or boolean for `ExpiredObjectDeleteMarker`
+     * @param {string} prop - Property must be defined in `validProps`
+     * @param {integer|boolean} value - integer for `Date` or `Days`, or
+     * boolean for `ExpiredObjectDeleteMarker`
+     * @return {undefined}
      */
-    addExpiration(prop: 'ExpiredObjectDeleteMarker', value: boolean): this;
-    addExpiration(prop: 'Date' | 'Days', value: number): this;
-    addExpiration(prop: string, value: number | boolean) {
+    addExpiration(prop, value) {
         const validProps = ['Date', 'Days', 'ExpiredObjectDeleteMarker'];
-        if (validProps.includes(prop)) {
+        if (validProps.indexOf(prop) > -1) {
             this.expiration = this.expiration || {};
             if (prop === 'ExpiredObjectDeleteMarker') {
-                // FIXME
-                // @ts-expect-error
                 this.expiration[prop] = JSON.parse(value);
             } else {
                 this.expiration[prop] = value;
@@ -145,46 +106,31 @@ export default class LifecycleRule {

     /**
      * NoncurrentVersionExpiration
-     * @param prop - Property must be defined in `validProps`
-     * @param value - integer for `NoncurrentDays` and `NewerNoncurrentVersions`
+     * @param {integer} days - NoncurrentDays
+     * @return {undefined}
      */
-    addNCVExpiration(prop: 'NoncurrentDays' | 'NewerNoncurrentVersions', value: number): this;
-    addNCVExpiration(prop: string, value: number) {
-        const validProps = ['NoncurrentDays', 'NewerNoncurrentVersions'];
-        if (validProps.includes(prop)) {
-            this.ncvExpiration = this.ncvExpiration || {
-                NoncurrentDays: null,
-                NewerNoncurrentVersions: null,
-            };
-            this.ncvExpiration[prop] = value;
-        }
+    addNCVExpiration(days) {
+        this.ncvExpiration = { NoncurrentDays: days };
         return this;
     }

     /**
-     * abortincompletemultipartupload
-     * @param days - DaysAfterInitiation
+     * AbortIncompleteMultipartUpload
+     * @param {integer} days - DaysAfterInitiation
+     * @return {undefined}
      */
-    addAbortMPU(days: number) {
+    addAbortMPU(days) {
         this.abortMPU = { DaysAfterInitiation: days };
         return this;
     }

     /**
      * Transitions
-     * @param transitions - transitions
+     * @param {array} transitions - transitions
+     * @return {undefined}
      */
-    addTransitions(transitions: any[]) {
+    addTransitions(transitions) {
         this.transitions = transitions;
         return this;
     }
-
-    /**
-     * NonCurrentVersionTransitions
-     * @param nvcTransitions - NonCurrentVersionTransitions
-     */
-    addNCVTransitions(nvcTransitions) {
-        this.ncvTransitions = nvcTransitions;
-        return this;
-    }
 }
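A sketch of the builder flow restored above: a prefix plus two tags drives `build()` into the And branch of the filter logic (this assumes the elided part of the constructor initializes `this.tags` to an empty array, and that `addExpiration` returns `this` like the other adders):

    const rule = new LifecycleRule('expire-logs', 'Enabled')
        .addPrefix('logs/')
        .addTag('app', 'web')
        .addTag('tier', 'cold')
        .addExpiration('Days', 30)
        .build();
    // rule.Filter.And => { Prefix: 'logs/', Tags: [{ Key, Value }, ...] }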
@@ -5,7 +5,7 @@ import {
     supportedNotificationEvents,
     notificationArnPrefix,
 } from '../constants';
-import errors, { ArsenalError } from '../errors';
+import errors from '../errors';

 /**
  * Format of xml request:
@@ -52,26 +52,20 @@ import errors from '../errors';
 */

 export default class NotificationConfiguration {
-    _parsedXml: any;
-    _config: {
-        error?: ArsenalError;
-        queueConfig?: any[];
-    };
-    _ids: Set<string>;
     /**
      * Create a Notification Configuration instance
-     * @param xml - parsed configuration xml
-     * @return - NotificationConfiguration instance
+     * @param {string} xml - parsed configuration xml
+     * @return {object} - NotificationConfiguration instance
      */
-    constructor(xml: any) {
+    constructor(xml) {
         this._parsedXml = xml;
         this._config = {};
-        this._ids = new Set();
+        this._ids = new Set([]);
     }

     /**
      * Get notification configuration
-     * @return - contains error if parsing failed
+     * @return {object} - contains error if parsing failed
      */
     getValidatedNotificationConfiguration() {
         const validationError = this._parseNotificationConfig();
@@ -83,7 +77,7 @@ export default class NotificationConfiguration {

     /**
      * Check that notification configuration is valid
-     * @return - error if parsing failed, else undefined
+     * @return {error | null} - error if parsing failed, else undefined
      */
     _parseNotificationConfig() {
         if (!this._parsedXml || this._parsedXml === '') {
@@ -101,19 +95,19 @@ export default class NotificationConfiguration {
             return null;
         }
         this._config.queueConfig = [];
-        let parseError: ArsenalError | undefined;
+        let parseError;
         for (let i = 0; i < queueConfig.length; i++) {
             const eventObj = this._parseEvents(queueConfig[i].Event);
             const filterObj = this._parseFilter(queueConfig[i].Filter);
             const idObj = this._parseId(queueConfig[i].Id);
             const arnObj = this._parseArn(queueConfig[i].Queue);

-            if ('error' in eventObj) {
+            if (eventObj.error) {
                 parseError = eventObj.error;
                 this._config = {};
                 break;
             }
-            if ('error' in filterObj) {
+            if (filterObj.error) {
                 parseError = filterObj.error;
                 this._config = {};
                 break;
@@ -135,43 +129,42 @@ export default class NotificationConfiguration {
                 filterRules: filterObj.filterRules,
             });
         }
-        return parseError ?? null;
+        return parseError;
     }

     /**
      * Check that events array is valid
-     * @param events - event array
-     * @return - contains error if parsing failed or events array
+     * @param {array} events - event array
+     * @return {object} - contains error if parsing failed or events array
      */
-    _parseEvents(events: any[]) {
-        if (!events || !events[0]) {
-            const msg = 'each queue configuration must contain an event';
-            const error = errors.MalformedXML.customizeDescription(msg);
-            return { error };
-        }
-        const eventsObj: { error?: ArsenalError, events: any[] } = {
-            events: [] as any[],
+    _parseEvents(events) {
+        const eventsObj = {
+            events: [],
         };
-        for (const e of events) {
+        if (!events || !events[0]) {
+            eventsObj.error = errors.MalformedXML.customizeDescription(
+                'each queue configuration must contain an event');
+            return eventsObj;
+        }
+        events.forEach(e => {
             if (!supportedNotificationEvents.has(e)) {
-                const msg = 'event array contains invalid or unsupported event';
-                const error = errors.MalformedXML.customizeDescription(msg);
-                return { error };
+                eventsObj.error = errors.MalformedXML.customizeDescription(
+                    'event array contains invalid or unsupported event');
             } else {
                 eventsObj.events.push(e);
             }
-        }
+        });
         return eventsObj;
     }

     /**
      * Check that filter array is valid
-     * @param filter - filter array
-     * @return - contains error if parsing failed or filter array
+     * @param {array} filter - filter array
+     * @return {object} - contains error if parsing failed or filter array
      */
-    _parseFilter(filter: any[]) {
+    _parseFilter(filter) {
         if (!filter || !filter[0]) {
-            return { filterRules: undefined };
+            return {};
         }
         if (!filter[0].S3Key || !filter[0].S3Key[0]) {
             return { error: errors.MalformedXML.customizeDescription(
@@ -182,7 +175,7 @@ export default class NotificationConfiguration {
             return { error: errors.MalformedXML.customizeDescription(
                 'if included, queue configuration filter must contain a rule') };
         }
-        const filterObj: { filterRules: { name: string; value: string }[] } = {
+        const filterObj = {
             filterRules: [],
         };
         const ruleArray = filterRules.FilterRule;
@@ -208,15 +201,15 @@ export default class NotificationConfiguration {

     /**
      * Check that id string is valid
-     * @param id - id string (optional)
-     * @return - contains error if parsing failed or id
+     * @param {string} id - id string (optional)
+     * @return {object} - contains error if parsing failed or id
      */
-    _parseId(id: string) {
+    _parseId(id) {
         if (id && id[0].length > 255) {
             return { error: errors.InvalidArgument.customizeDescription(
                 'queue configuration ID is greater than 255 characters long') };
         }
-        let validId: string;
+        let validId;
         if (!id || !id[0]) {
             // id is optional property, so create one if not provided or is ''
             // We generate 48-character alphanumeric, unique id for rule
@@ -235,10 +228,10 @@ export default class NotificationConfiguration {

     /**
      * Check that arn string is valid
-     * @param arn - queue arn
-     * @return - contains error if parsing failed or queue arn
+     * @param {string} arn - queue arn
+     * @return {object} - contains error if parsing failed or queue arn
      */
-    _parseArn(arn: string) {
+    _parseArn(arn) {
         if (!arn || !arn[0]) {
             return { error: errors.MalformedXML.customizeDescription(
                 'each queue configuration must contain a queue arn'),
@@ -256,21 +249,11 @@ export default class NotificationConfiguration {

     /**
      * Get XML representation of notification configuration object
-     * @param config - notification configuration object
-     * @return - XML representation of config
+     * @param {object} config - notification configuration object
+     * @return {string} - XML representation of config
      */
-    static getConfigXML(config: {
-        queueConfig: {
-            id: string;
-            events: string[];
-            queueArn: string;
-            filterRules: {
-                name: string;
-                value: string;
-            }[];
-        }[];
-    }) {
-        const xmlArray: string[] = [];
+    static getConfigXML(config) {
+        const xmlArray = [];
         if (config && config.queueConfig) {
             config.queueConfig.forEach(c => {
                 xmlArray.push('<QueueConfiguration>');
@@ -301,19 +284,20 @@ export default class NotificationConfiguration {
     /**
      * Validate the bucket metadata notification configuration structure and
      * value types
-     * @param config - The notificationconfiguration to validate
+     * @param {object} config - The notificationconfiguration to validate
+     * @return {undefined}
      */
-    static validateConfig(config: any) {
+    static validateConfig(config) {
         assert.strictEqual(typeof config, 'object');
         if (!config.queueConfig) {
             return;
         }
-        config.queueConfig.forEach((q: any) => {
+        config.queueConfig.forEach(q => {
             const { events, queueArn, filterRules, id } = q;
-            events.forEach((e: any) => assert.strictEqual(typeof e, 'string'));
+            events.forEach(e => assert.strictEqual(typeof e, 'string'));
             assert.strictEqual(typeof queueArn, 'string');
             if (filterRules) {
-                filterRules.forEach((f: any) => {
+                filterRules.forEach(f => {
                     assert.strictEqual(typeof f.name, 'string');
                     assert.strictEqual(typeof f.value, 'string');
                 });
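A sketch of driving the parser: the constructor takes already-parsed XML in the xml2js array shape the code above indexes into, and failures surface on the returned object (the event and ARN values are illustrative, and the exact top-level key read by the unshown part of `_parseNotificationConfig` is assumed):

    const parsedXml = { // hypothetical xml2js output
        NotificationConfiguration: {
            QueueConfiguration: [{
                Event: ['s3:ObjectCreated:*'],
                Queue: ['arn:scality:bucketnotif:::target'],
            }],
        },
    };
    const res = new NotificationConfiguration(parsedXml)
        .getValidatedNotificationConfiguration();
    if (res.error) {
        // e.g. MalformedXML for a missing Event or Queue arn
    }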
@@ -1,12 +1,5 @@
 import assert from 'assert';
-import errors, { ArsenalError } from '../errors';
-
-export type Config = any;
-export type LockMode = 'GOVERNANCE' | 'COMPLIANCE';
-export type DefaultRetention = { Days: number } | { Years: number };
-export type ParsedRetention =
-    | { error: ArsenalError }
-    | { timeType: 'days' | 'years'; timeValue: number };
+import errors from '../errors';

 /**
  * Format of xml request:
@@ -34,22 +27,19 @@ export type ParsedRetention =
 * }
 */
 export default class ObjectLockConfiguration {
-    _parsedXml: any;
-    _config: Config;
-
     /**
      * Create an Object Lock Configuration instance
-     * @param xml - the parsed configuration xml
-     * @return - ObjectLockConfiguration instance
+     * @param {string} xml - the parsed configuration xml
+     * @return {object} - ObjectLockConfiguration instance
      */
-    constructor(xml: any) {
+    constructor(xml) {
         this._parsedXml = xml;
         this._config = {};
     }

     /**
      * Get the object lock configuration
-     * @return - contains error if parsing failed
+     * @return {object} - contains error if parsing failed
      */
     getValidatedObjectLockConfiguration() {
         const validConfig = this._parseObjectLockConfig();
@@ -61,128 +51,131 @@ export default class ObjectLockConfiguration {

     /**
      * Check that mode is valid
-     * @param mode - array containing mode value
-     * @return - contains error if parsing failed
+     * @param {array} mode - array containing mode value
+     * @return {object} - contains error if parsing failed
      */
-    _parseMode(mode: LockMode[]): { error: ArsenalError } | { mode: LockMode } {
+    _parseMode(mode) {
+        const validMode = {};
         const expectedModes = ['GOVERNANCE', 'COMPLIANCE'];
         if (!mode || !mode[0]) {
-            const msg = 'request xml does not contain Mode';
-            const error = errors.MalformedXML.customizeDescription(msg);
-            return { error };
+            validMode.error = errors.MalformedXML.customizeDescription(
+                'request xml does not contain Mode');
+            return validMode;
         }
         if (mode.length > 1) {
-            const msg = 'request xml contains more than one Mode';
-            const error = errors.MalformedXML.customizeDescription(msg);
-            return { error };
+            validMode.error = errors.MalformedXML.customizeDescription(
+                'request xml contains more than one Mode');
+            return validMode;
         }
         if (!expectedModes.includes(mode[0])) {
-            const msg = 'Mode request xml must be one of "GOVERNANCE", "COMPLIANCE"';
-            const error = errors.MalformedXML.customizeDescription(msg);
-            return { error };
+            validMode.error = errors.MalformedXML.customizeDescription(
+                'Mode request xml must be one of "GOVERNANCE", "COMPLIANCE"');
+            return validMode;
         }
-        return { mode: mode[0] };
+        validMode.mode = mode[0];
+        return validMode;
     }

     /**
      * Check that time limit is valid
-     * @param dr - DefaultRetention object containing days or years
-     * @return - contains error if parsing failed
+     * @param {object} dr - DefaultRetention object containing days or years
+     * @return {object} - contains error if parsing failed
      */
-    _parseTime(dr: DefaultRetention): ParsedRetention {
-        if ('Days' in dr && 'Years' in dr) {
-            const msg = 'request xml contains both Days and Years';
-            const error = errors.MalformedXML.customizeDescription(msg);
-            return { error };
+    _parseTime(dr) {
+        const validTime = {};
+        if (dr.Days && dr.Years) {
+            validTime.error = errors.MalformedXML.customizeDescription(
+                'request xml contains both Days and Years');
+            return validTime;
         }
-        const timeType = 'Days' in dr ? 'Days' : 'Years';
+        const timeType = dr.Days ? 'Days' : 'Years';
         if (!dr[timeType] || !dr[timeType][0]) {
-            const msg = 'request xml does not contain Days or Years';
-            const error = errors.MalformedXML.customizeDescription(msg);
-            return { error };
+            validTime.error = errors.MalformedXML.customizeDescription(
+                'request xml does not contain Days or Years');
+            return validTime;
         }
         if (dr[timeType].length > 1) {
-            const msg = 'request xml contains more than one retention period';
-            const error = errors.MalformedXML.customizeDescription(msg);
-            return { error };
+            validTime.error = errors.MalformedXML.customizeDescription(
+                'request xml contains more than one retention period');
+            return validTime;
        }
         const timeValue = Number.parseInt(dr[timeType][0], 10);
         if (Number.isNaN(timeValue)) {
-            const msg = 'request xml does not contain valid retention period';
-            const error = errors.MalformedXML.customizeDescription(msg);
-            return { error };
+            validTime.error = errors.MalformedXML.customizeDescription(
+                'request xml does not contain valid retention period');
+            return validTime;
         }
         if (timeValue < 1) {
-            const msg = 'retention period must be a positive integer';
-            const error = errors.InvalidArgument.customizeDescription(msg);
-            return { error };
+            validTime.error = errors.InvalidArgument.customizeDescription(
+                'retention period must be a positive integer');
+            return validTime;
         }
         if ((timeType === 'Days' && timeValue > 36500) ||
             (timeType === 'Years' && timeValue > 100)) {
-            const msg = 'retention period is too large';
-            const error = errors.InvalidArgument.customizeDescription(msg);
-            return { error };
+            validTime.error = errors.InvalidArgument.customizeDescription(
+                'retention period is too large');
+            return validTime;
         }
-        return {
-            timeType: timeType.toLowerCase() as 'days' | 'years',
-            timeValue: timeValue,
-        };
+        validTime.timeType = timeType.toLowerCase();
+        validTime.timeValue = timeValue;
+        return validTime;
     }

     /**
      * Check that object lock configuration is valid
-     * @return - contains error if parsing failed
+     * @return {object} - contains error if parsing failed
      */
     _parseObjectLockConfig() {
-        const validConfig: { error?: ArsenalError } = {};
+        const validConfig = {};
         if (!this._parsedXml || this._parsedXml === '') {
-            const msg = 'request xml is undefined or empty';
-            const error = errors.MalformedXML.customizeDescription(msg);
-            return { error };
+            validConfig.error = errors.MalformedXML.customizeDescription(
+                'request xml is undefined or empty');
+            return validConfig;
         }
         const objectLockConfig = this._parsedXml.ObjectLockConfiguration;
         if (!objectLockConfig || objectLockConfig === '') {
-            const msg = 'request xml does not include ObjectLockConfiguration';
-            const error = errors.MalformedXML.customizeDescription(msg);
-            return { error };
+            validConfig.error = errors.MalformedXML.customizeDescription(
+                'request xml does not include ObjectLockConfiguration');
+            return validConfig;
         }
         const objectLockEnabled = objectLockConfig.ObjectLockEnabled;
         if (!objectLockEnabled || objectLockEnabled[0] !== 'Enabled') {
-            const msg = 'request xml does not include valid ObjectLockEnabled';
-            const error = errors.MalformedXML.customizeDescription(msg);
-            return { error };
+            validConfig.error = errors.MalformedXML.customizeDescription(
+                'request xml does not include valid ObjectLockEnabled');
+            return validConfig;
         }
         const ruleArray = objectLockConfig.Rule;
         if (ruleArray) {
             if (ruleArray.length > 1) {
-                const msg = 'request xml contains more than one rule';
-                const error = errors.MalformedXML.customizeDescription(msg);
-                return { error };
+                validConfig.error = errors.MalformedXML.customizeDescription(
+                    'request xml contains more than one rule');
+                return validConfig;
             }
             const drArray = ruleArray[0].DefaultRetention;
             if (!drArray || !drArray[0] || drArray[0] === '') {
-                const msg = 'Rule request xml does not contain DefaultRetention';
-                const error = errors.MalformedXML.customizeDescription(msg);
-                return { error };
+                validConfig.error = errors.MalformedXML.customizeDescription(
+                    'Rule request xml does not contain DefaultRetention');
+                return validConfig;
             }
             if (!drArray[0].Mode || (!drArray[0].Days && !drArray[0].Years)) {
-                const msg =
+                validConfig.error = errors.MalformedXML.customizeDescription(
                     'DefaultRetention request xml does not contain Mode or ' +
-                    'retention period (Days or Years)';
-                const error = errors.MalformedXML.customizeDescription(msg);
-                return { error };
+                    'retention period (Days or Years)');
+                return validConfig;
             }
             const validMode = this._parseMode(drArray[0].Mode);
-            if ('error' in validMode) {
-                return validMode;
+            if (validMode.error) {
+                validConfig.error = validMode.error;
+                return validConfig;
             }
             const validTime = this._parseTime(drArray[0]);
-            if ('error' in validTime) {
-                return validTime;
+            if (validTime.error) {
+                validConfig.error = validTime.error;
+                return validConfig;
             }
             this._config.rule = {};
             this._config.rule.mode = validMode.mode;
-            this._config.rule[validTime.timeType!] = validTime.timeValue;
+            this._config.rule[validTime.timeType] = validTime.timeValue;
         }
         return validConfig;
     }
@@ -190,9 +183,10 @@ export default class ObjectLockConfiguration {
     /**
      * Validate the bucket metadata object lock configuration structure and
      * value types
-     * @param config - The object lock configuration to validate
+     * @param {object} config - The object lock configuration to validate
+     * @return {undefined}
      */
-    static validateConfig(config: any) {
+    static validateConfig(config) {
         assert.strictEqual(typeof config, 'object');
         const rule = config.rule;
         if (rule) {
@@ -208,10 +202,10 @@ export default class ObjectLockConfiguration {

     /**
      * Get the XML representation of the configuration object
-     * @param config - The bucket object lock configuration
-     * @return - The XML representation of the configuration
+     * @param {object} config - The bucket object lock configuration
+     * @return {string} - The XML representation of the configuration
      */
-    static getConfigXML(config: any) {
+    static getConfigXML(config) {
         // object lock is enabled on the bucket but object lock configuration
         // not set
         if (config.rule === undefined) {
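A sketch of the happy path through `_parseObjectLockConfig` above, again in the xml2js array shape (values illustrative):

    const parsedXml = {
        ObjectLockConfiguration: {
            ObjectLockEnabled: ['Enabled'],
            Rule: [{
                DefaultRetention: [{ Mode: ['GOVERNANCE'], Days: ['30'] }],
            }],
        },
    };
    const res = new ObjectLockConfiguration(parsedXml)
        .getValidatedObjectLockConfiguration();
    // on success the parsed rule is { mode: 'GOVERNANCE', days: 30 };
    // on malformed input res.error carries the MalformedXML description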
File diff suppressed because it is too large
@@ -1,94 +0,0 @@
-/*
- * Code based on Yutaka Oishi (Fujifilm) contributions
- * Date: 11 Sep 2020
- */
-
-/**
- * class representing the x-amz-restore of object metadata.
- *
- * @class
- */
-export default class ObjectMDAmzRestore {
-    'expiry-date': Date | string;
-    'ongoing-request': boolean;
-
-    /**
-     *
-     * @constructor
-     * @param ongoingRequest ongoing-request
-     * @param [expiryDate] expiry-date
-     * @throws case of invalid parameter
-     */
-    constructor(ongoingRequest: boolean, expiryDate?: Date | string) {
-        this.setOngoingRequest(ongoingRequest);
-        this.setExpiryDate(expiryDate);
-    }
-
-    /**
-     *
-     * @param data archiveInfo
-     * @returns true if the provided object is valid
-     */
-    static isValid(data: { 'ongoing-request': boolean; 'expiry-date': Date | string }) {
-        try {
-            // eslint-disable-next-line no-new
-            new ObjectMDAmzRestore(data['ongoing-request'], data['expiry-date']);
-            return true;
-        } catch (err) {
-            return false;
-        }
-    }
-
-    /**
-     *
-     * @returns ongoing-request
-     */
-    getOngoingRequest() {
-        return this['ongoing-request'];
-    }
-
-    /**
-     *
-     * @param value ongoing-request
-     * @throws case of invalid parameter
-     */
-    setOngoingRequest(value?: boolean) {
-        if (value === undefined) {
-            throw new Error('ongoing-request is required.');
-        } else if (typeof value !== 'boolean') {
-            throw new Error('ongoing-request must be type of boolean.');
-        }
-        this['ongoing-request'] = value;
-    }
-
-    /**
-     *
-     * @returns expiry-date
-     */
-    getExpiryDate() {
-        return this['expiry-date'];
-    }
-
-    /**
-     *
-     * @param value expiry-date
-     * @throws case of invalid parameter
-     */
-    setExpiryDate(value?: Date | string) {
-        if (value) {
-            const checkWith = (new Date(value)).getTime();
-            if (Number.isNaN(Number(checkWith))) {
-                throw new Error('expiry-date is must be a valid Date.');
-            }
-            this['expiry-date'] = value;
-        }
-    }
-
-    /**
-     *
-     * @returns itself
-     */
-    getValue() {
-        return this;
-    }
-}
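For reference, a sketch of the validation contract the file removed above enforced: the constructor throws on bad input, and `isValid` simply try/catches it:

    const restore = new ObjectMDAmzRestore(true, '2020-12-01T00:00:00.000Z');
    restore.getOngoingRequest(); // => true
    ObjectMDAmzRestore.isValid({
        'ongoing-request': 'yes', // not a boolean, so the constructor throws
        'expiry-date': '2020-12-01T00:00:00.000Z',
    }); // => false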
@ -1,184 +0,0 @@
|
|||
/**
|
||||
* class representing the archive of object metadata.
|
||||
*
|
||||
* @class
|
||||
*/
|
||||
export default class ObjectMDArchive {
|
||||
archiveInfo: any;
|
||||
// @ts-ignore
|
||||
restoreRequestedAt: Date | string;
|
||||
// @ts-ignore
|
||||
restoreRequestedDays: number;
|
||||
// @ts-ignore
|
||||
restoreCompletedAt: Date | string;
|
||||
// @ts-ignore
|
||||
restoreWillExpireAt: Date | string;
|
||||
|
||||
/**
|
||||
*
|
||||
* @constructor
|
||||
* @param archiveInfo contains the archive info set by the TLP and returned by the TLP jobs
|
||||
* @param [restoreRequestedAt] set at the time restore request is made by the client
|
||||
* @param [restoreRequestedDays] set at the time restore request is made by the client
|
||||
* @param [restoreCompletedAt] set at the time of successful restore
|
||||
* @param [restoreWillExpireAt] computed and stored at the time of restore
|
||||
* @throws case of invalid parameter
|
||||
*/
|
||||
constructor(
|
||||
archiveInfo: any,
|
||||
restoreRequestedAt?: Date | string,
|
||||
restoreRequestedDays?: number,
|
||||
restoreCompletedAt?: Date | string,
|
||||
        restoreWillExpireAt?: Date | string,
    ) {
        this.setArchiveInfo(archiveInfo);
        this.setRestoreRequestedAt(restoreRequestedAt!);
        this.setRestoreRequestedDays(restoreRequestedDays!);
        this.setRestoreCompletedAt(restoreCompletedAt!);
        this.setRestoreWillExpireAt(restoreWillExpireAt!);
    }

    /**
     *
     * @param data archiveInfo
     * @returns true if the provided object is valid
     */
    static isValid(data: {
        archiveInfo: any;
        restoreRequestedAt?: Date;
        restoreRequestedDays?: number;
        restoreCompletedAt?: Date;
        restoreWillExpireAt?: Date;
    }) {
        try {
            // eslint-disable-next-line no-new
            new ObjectMDArchive(
                data.archiveInfo,
                data.restoreRequestedAt,
                data.restoreRequestedDays,
                data.restoreCompletedAt,
                data.restoreWillExpireAt,
            );
            return true;
        } catch (err) {
            return false;
        }
    }

    /**
     *
     * @returns archiveInfo
     */
    getArchiveInfo() {
        return this.archiveInfo;
    }

    /**
     * @param value archiveInfo
     * @throws case of invalid parameter
     */
    setArchiveInfo(value: any) {
        if (!value) {
            throw new Error('archiveInfo is required.');
        } else if (typeof value !== 'object') {
            throw new Error('archiveInfo must be type of object.');
        }
        this.archiveInfo = value;
    }

    /**
     *
     * @returns restoreRequestedAt
     */
    getRestoreRequestedAt() {
        return this.restoreRequestedAt;
    }
    /**
     * @param value restoreRequestedAt
     * @throws case of invalid parameter
     */
    setRestoreRequestedAt(value: Date | string) {
        if (value) {
            const checkWith = (new Date(value)).getTime();
            if (Number.isNaN(Number(checkWith))) {
                throw new Error('restoreRequestedAt must be a valid Date.');
            }
            this.restoreRequestedAt = value;
        }
    }

    /**
     *
     * @returns restoreRequestedDays
     */
    getRestoreRequestedDays() {
        return this.restoreRequestedDays;
    }
    /**
     * @param value restoreRequestedDays
     * @throws case of invalid parameter
     */
    setRestoreRequestedDays(value: number) {
        if (value) {
            if (isNaN(value)) {
                throw new Error('restoreRequestedDays must be type of Number.');
            }
            this.restoreRequestedDays = value;
        }
    }

    /**
     *
     * @returns restoreCompletedAt
     */
    getRestoreCompletedAt() {
        return this.restoreCompletedAt;
    }
    /**
     * @param value restoreCompletedAt
     * @throws case of invalid parameter
     */
    setRestoreCompletedAt(value: Date | string) {
        if (value) {
            if (!this.restoreRequestedAt || !this.restoreRequestedDays) {
                throw new Error('restoreCompletedAt must be set after restoreRequestedAt and restoreRequestedDays.');
            }
            const checkWith = (new Date(value)).getTime();
            if (Number.isNaN(Number(checkWith))) {
                throw new Error('restoreCompletedAt must be a valid Date.');
            }
            this.restoreCompletedAt = value;
        }
    }
    /**
     *
     * @returns restoreWillExpireAt
     */
    getRestoreWillExpireAt() {
        return this.restoreWillExpireAt;
    }
    /**
     * @param value restoreWillExpireAt
     * @throws case of invalid parameter
     */
    setRestoreWillExpireAt(value: Date | string) {
        if (value) {
            if (!this.restoreRequestedAt || !this.restoreRequestedDays) {
                throw new Error('restoreWillExpireAt must be set after restoreRequestedAt and restoreRequestedDays.');
            }
            const checkWith = (new Date(value)).getTime();
            if (Number.isNaN(Number(checkWith))) {
                throw new Error('restoreWillExpireAt must be a valid Date.');
            }
            this.restoreWillExpireAt = value;
        }
    }

    /**
     *
     * @returns itself
     */
    getValue() {
        return this;
    }
}
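For orientation, isValid() above delegates entirely to the constructor: it attempts to build an instance and reports whether any setter threw. A minimal sketch (the archiveId payload is illustrative, not part of the class contract):

    import ObjectMDArchive from './ObjectMDArchive';

    // Passes: archiveInfo is an object, the date parses, days is numeric.
    const ok = ObjectMDArchive.isValid({
        archiveInfo: { archiveId: 'backend-id-123' },
        restoreRequestedAt: new Date(),
        restoreRequestedDays: 5,
    });
    // ok === true; a non-object archiveInfo would make it return false.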
@@ -3,60 +3,32 @@
 * Blob and Container objects.
 */
export default class ObjectMDAzureInfo {
    _data: {
        containerPublicAccess: string;
        containerStoredAccessPolicies: any[];
        containerImmutabilityPolicy: any;
        containerLegalHoldStatus: boolean;
        containerDeletionInProgress: boolean;
        blobType: string;
        blobContentMD5: string;
        blobIssuedETag: string;
        blobCopyInfo: any;
        blobSequenceNumber: number;
        blobAccessTierChangeTime: Date;
        blobUncommitted: boolean;
    };

    /**
     * @constructor
     * @param obj - Raw structure for the Azure info on Blob/Container
     * @param obj.containerPublicAccess - Public access authorization
     * @param {object} obj - Raw structure for the Azure info on Blob/Container
     * @param {string} obj.containerPublicAccess - Public access authorization
     * type
     * @param obj.containerStoredAccessPolicies - Access policies
     * @param {object[]} obj.containerStoredAccessPolicies - Access policies
     * for Shared Access Signature bearer
     * @param obj.containerImmutabilityPolicy - data immutability
     * @param {object} obj.containerImmutabilityPolicy - data immutability
     * policy for this container
     * @param obj.containerLegalHoldStatus - legal hold status for
     * @param {boolean} obj.containerLegalHoldStatus - legal hold status for
     * this container
     * @param obj.containerDeletionInProgress - deletion in progress
     * @param {boolean} obj.containerDeletionInProgress - deletion in progress
     * indicator for this container
     * @param obj.blobType - defines the type of blob for this object
     * @param obj.blobContentMD5 - whole object MD5 sum set by the
     * @param {string} obj.blobType - defines the type of blob for this object
     * @param {string} obj.blobContentMD5 - whole object MD5 sum set by the
     * client through the Azure API
     * @param obj.blobIssuedETag - backup of the issued ETag on MD only
     * @param {string} obj.blobIssuedETag - backup of the issued ETag on MD only
     * operations like Set Blob Properties and Set Blob Metadata
     * @param obj.blobCopyInfo - information pertaining to past and
     * @param {object} obj.blobCopyInfo - information pertaining to past and
     * pending copy operation targeting this object
     * @param obj.blobSequenceNumber - sequence number for a PageBlob
     * @param obj.blobAccessTierChangeTime - date of change of tier
     * @param obj.blobUncommitted - A block has been put for a
     * @param {number} obj.blobSequenceNumber - sequence number for a PageBlob
     * @param {Date} obj.blobAccessTierChangeTime - date of change of tier
     * @param {boolean} obj.blobUncommitted - A block has been put for a
     * nonexistent blob which is about to be created
     */
    constructor(obj: {
        containerPublicAccess: string;
        containerStoredAccessPolicies: any[];
        containerImmutabilityPolicy: any;
        containerLegalHoldStatus: boolean;
        containerDeletionInProgress: boolean;
        blobType: string;
        blobContentMD5: string;
        blobIssuedETag: string;
        blobCopyInfo: any;
        blobSequenceNumber: number;
        blobAccessTierChangeTime: Date;
        blobUncommitted: boolean;
    }) {
    constructor(obj) {
        this._data = {
            containerPublicAccess: obj.containerPublicAccess,
            containerStoredAccessPolicies: obj.containerStoredAccessPolicies,

@@ -77,7 +49,7 @@ export default class ObjectMDAzureInfo {
        return this._data.containerPublicAccess;
    }

    setContainerPublicAccess(containerPublicAccess: string) {
    setContainerPublicAccess(containerPublicAccess) {
        this._data.containerPublicAccess = containerPublicAccess;
        return this;
    }

@@ -86,7 +58,7 @@ export default class ObjectMDAzureInfo {
        return this._data.containerStoredAccessPolicies;
    }

    setContainerStoredAccessPolicies(containerStoredAccessPolicies: any[]) {
    setContainerStoredAccessPolicies(containerStoredAccessPolicies) {
        this._data.containerStoredAccessPolicies =
            containerStoredAccessPolicies;
        return this;

@@ -96,7 +68,7 @@ export default class ObjectMDAzureInfo {
        return this._data.containerImmutabilityPolicy;
    }

    setContainerImmutabilityPolicy(containerImmutabilityPolicy: any) {
    setContainerImmutabilityPolicy(containerImmutabilityPolicy) {
        this._data.containerImmutabilityPolicy = containerImmutabilityPolicy;
        return this;
    }

@@ -105,7 +77,7 @@ export default class ObjectMDAzureInfo {
        return this._data.containerLegalHoldStatus;
    }

    setContainerLegalHoldStatus(containerLegalHoldStatus: boolean) {
    setContainerLegalHoldStatus(containerLegalHoldStatus) {
        this._data.containerLegalHoldStatus = containerLegalHoldStatus;
        return this;
    }

@@ -114,7 +86,7 @@ export default class ObjectMDAzureInfo {
        return this._data.containerDeletionInProgress;
    }

    setContainerDeletionInProgress(containerDeletionInProgress: boolean) {
    setContainerDeletionInProgress(containerDeletionInProgress) {
        this._data.containerDeletionInProgress = containerDeletionInProgress;
        return this;
    }

@@ -123,7 +95,7 @@ export default class ObjectMDAzureInfo {
        return this._data.blobType;
    }

    setBlobType(blobType: string) {
    setBlobType(blobType) {
        this._data.blobType = blobType;
        return this;
    }

@@ -132,7 +104,7 @@ export default class ObjectMDAzureInfo {
        return this._data.blobContentMD5;
    }

    setBlobContentMD5(blobContentMD5: string) {
    setBlobContentMD5(blobContentMD5) {
        this._data.blobContentMD5 = blobContentMD5;
        return this;
    }

@@ -141,7 +113,7 @@ export default class ObjectMDAzureInfo {
        return this._data.blobIssuedETag;
    }

    setBlobIssuedETag(blobIssuedETag: string) {
    setBlobIssuedETag(blobIssuedETag) {
        this._data.blobIssuedETag = blobIssuedETag;
        return this;
    }

@@ -150,7 +122,7 @@ export default class ObjectMDAzureInfo {
        return this._data.blobCopyInfo;
    }

    setBlobCopyInfo(blobCopyInfo: any) {
    setBlobCopyInfo(blobCopyInfo) {
        this._data.blobCopyInfo = blobCopyInfo;
        return this;
    }

@@ -159,7 +131,7 @@ export default class ObjectMDAzureInfo {
        return this._data.blobSequenceNumber;
    }

    setBlobSequenceNumber(blobSequenceNumber: number) {
    setBlobSequenceNumber(blobSequenceNumber) {
        this._data.blobSequenceNumber = blobSequenceNumber;
        return this;
    }

@@ -168,7 +140,7 @@ export default class ObjectMDAzureInfo {
        return this._data.blobAccessTierChangeTime;
    }

    setBlobAccessTierChangeTime(blobAccessTierChangeTime: Date) {
    setBlobAccessTierChangeTime(blobAccessTierChangeTime) {
        this._data.blobAccessTierChangeTime = blobAccessTierChangeTime;
        return this;
    }

@@ -177,7 +149,7 @@ export default class ObjectMDAzureInfo {
        return this._data.blobUncommitted;
    }

    setBlobUncommitted(blobUncommitted: boolean) {
    setBlobUncommitted(blobUncommitted) {
        this._data.blobUncommitted = blobUncommitted;
        return this;
    }
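Every setter above returns this, so callers can chain updates; a short sketch, assuming a raw Azure info object rawAzureInfo is already at hand (field values illustrative):

    const info = new ObjectMDAzureInfo(rawAzureInfo);
    info.setBlobType('BlockBlob')
        .setBlobSequenceNumber(0)
        .setBlobUncommitted(false);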
@@ -1,49 +1,28 @@
export type Ciphered = { cryptoScheme: number; cipheredDataKey: string };
export type BaseLocation = { key: string; dataStoreName: string };
export type Location = BaseLocation & {
    start: number;
    size: number;
    dataStoreETag: string;
    dataStoreVersionId: string;
    blockId?: string;
};
export type ObjectMDLocationData = {
    key: string;
    start: number;
    size: number;
    dataStoreName: string;
    dataStoreETag: string;
    dataStoreVersionId: string;
    blockId?: string;
    cryptoScheme?: number;
    cipheredDataKey?: string;
};
/**
 * Helper class to ease access to a single data location in metadata
 * 'location' array
 */
export default class ObjectMDLocation {
    _data: ObjectMDLocationData;
    /**
     * @constructor
     * @param locationObj - single data location info
     * @param locationObj.key - data backend key
     * @param locationObj.start - index of first data byte of
     * @param {object} locationObj - single data location info
     * @param {string} locationObj.key - data backend key
     * @param {number} locationObj.start - index of first data byte of
     * this part in the full object
     * @param locationObj.size - byte length of data part
     * @param locationObj.dataStoreName - type of data store
     * @param locationObj.dataStoreETag - internal ETag of
     * @param {number} locationObj.size - byte length of data part
     * @param {string} locationObj.dataStoreName - type of data store
     * @param {string} locationObj.dataStoreETag - internal ETag of
     * data part
     * @param [locationObj.dataStoreVersionId] - versionId,
     * @param {string} [locationObj.dataStoreVersionId] - versionId,
     * needed for cloud backends
     * @param [location.cryptoScheme] - if location data is
     * @param {number} [location.cryptoScheme] - if location data is
     * encrypted: the encryption scheme version
     * @param [location.cipheredDataKey] - if location data
     * @param {string} [location.cipheredDataKey] - if location data
     * is encrypted: the base64-encoded ciphered data key
     * @param [locationObj.blockId] - blockId of the part,
     * @param {string} [locationObj.blockId] - blockId of the part,
     * set by the Azure Blob Service REST API frontend
     */
    constructor(locationObj: Location | (Location & Ciphered)) {
    constructor(locationObj) {
        this._data = {
            key: locationObj.key,
            start: locationObj.start,

@@ -53,7 +32,7 @@ export default class ObjectMDLocation {
            dataStoreVersionId: locationObj.dataStoreVersionId,
            blockId: locationObj.blockId,
        };
        if ('cryptoScheme' in locationObj) {
        if (locationObj.cryptoScheme) {
            this._data.cryptoScheme = locationObj.cryptoScheme;
            this._data.cipheredDataKey = locationObj.cipheredDataKey;
        }

@@ -70,17 +49,17 @@ export default class ObjectMDLocation {
    /**
     * Update data location with new info
     *
     * @param location - single data location info
     * @param location.key - data backend key
     * @param location.dataStoreName - type of data store
     * @param [location.dataStoreVersionId] - data backend version ID
     * @param [location.cryptoScheme] - if location data is
     * @param {object} location - single data location info
     * @param {string} location.key - data backend key
     * @param {string} location.dataStoreName - type of data store
     * @param {string} [location.dataStoreVersionId] - data backend version ID
     * @param {number} [location.cryptoScheme] - if location data is
     * encrypted: the encryption scheme version
     * @param [location.cipheredDataKey] - if location data
     * @param {string} [location.cipheredDataKey] - if location data
     * is encrypted: the base64-encoded ciphered data key
     * @return return this
     * @return {ObjectMDLocation} return this
     */
    setDataLocation(location: BaseLocation | (BaseLocation & Ciphered)) {
    setDataLocation(location) {
        [
            'key',
            'dataStoreName',

@@ -117,16 +96,11 @@ export default class ObjectMDLocation {
        return this._data.start;
    }

    setPartStart(start: number) {
        this._data.start = start;
        return this;
    }

    getPartSize() {
        return this._data.size;
    }

    setPartSize(size: number) {
    setPartSize(size) {
        this._data.size = size;
        return this;
    }

@@ -143,7 +117,7 @@ export default class ObjectMDLocation {
        return this._data.blockId;
    }

    setBlockId(blockId: string) {
    setBlockId(blockId) {
        this._data.blockId = blockId;
        return this;
    }
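A sketch of the intended use, following the JSDoc above (all values illustrative):

    const part = new ObjectMDLocation({
        key: 'backendKey1',
        start: 0,
        size: 1024,
        dataStoreName: 'us-east-1',
        dataStoreETag: '1:0123456789abcdef0123456789abcdef',
        dataStoreVersionId: 'v1',
    });
    // Re-point the part at a new backend location; start/size are preserved.
    part.setDataLocation({ key: 'backendKey2', dataStoreName: 'azure-east' });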
@@ -1,16 +1,17 @@
import assert from 'assert';
import UUID from 'uuid';

import { RequestLogger } from 'werelogs';

import escapeForXml from '../s3middleware/escapeForXml';
import errors from '../errors';
import { isValidBucketName } from '../s3routes/routesUtils';
import { Status } from './LifecycleRule';

const MAX_RULES = 1000;
const RULE_ID_LIMIT = 255;
const validStorageClasses = ['STANDARD', 'STANDARD_IA', 'REDUCED_REDUNDANCY'];
const validStorageClasses = [
    'STANDARD',
    'STANDARD_IA',
    'REDUCED_REDUNDANCY',
];

/**
    Example XML request:

@@ -36,45 +37,27 @@ const validStorageClasses = ['STANDARD', 'STANDARD_IA', 'REDUCED_REDUNDANCY'];
    </ReplicationConfiguration>
*/

export type Rule = {
    prefix: string;
    enabled: boolean;
    id: string;
    storageClass?: any;
};

export type Destination = { StorageClass: string[]; Bucket: string };
export type XMLRule = {
    Prefix: string[];
    Status: Status[];
    ID?: string[];
    Destination: Destination[];
    Transition?: any[];
    NoncurrentVersionTransition?: any[];
    Filter?: string;
};

export default class ReplicationConfiguration {
    _parsedXML: any;
    _log: RequestLogger;
    _config: any;
    _configPrefixes: string[];
    _configIDs: string[];
    _role: string | null;
    _destination: string | null;
    _rules: Rule[] | null;
    _prevStorageClass: null;
    _hasScalityDestination: boolean | null;
    _preferredReadLocation: string | null;
    _parsedXML
    _log
    _config
    _configPrefixes
    _configIDs
    _role
    _destination
    _rules
    _prevStorageClass
    _hasScalityDestination
    _preferredReadLocation

    /**
     * Create a ReplicationConfiguration instance
     * @param xml - The parsed XML
     * @param log - Werelogs logger
     * @param config - S3 server configuration
     * @return - ReplicationConfiguration instance
     * @param {string} xml - The parsed XML
     * @param {object} log - Werelogs logger
     * @param {object} config - S3 server configuration
     * @return {object} - ReplicationConfiguration instance
     */
    constructor(xml: any, log: RequestLogger, config: any) {
    constructor(xml, log, config) {
        this._parsedXML = xml;
        this._log = log;
        this._config = config;

@@ -93,7 +76,7 @@ export default class ReplicationConfiguration {

    /**
     * Get the role of the bucket replication configuration
     * @return - The role if defined, otherwise `null`
     * @return {string|null} - The role if defined, otherwise `null`
     */
    getRole() {
        return this._role;

@@ -101,7 +84,7 @@ export default class ReplicationConfiguration {

    /**
     * The bucket to replicate data to
     * @return - The bucket if defined, otherwise `null`
     * @return {string|null} - The bucket if defined, otherwise `null`
     */
    getDestination() {
        return this._destination;

@@ -109,7 +92,7 @@ export default class ReplicationConfiguration {

    /**
     * The rules for replication configuration
     * @return - The rules if defined, otherwise `null`
     * @return {string|null} - The rules if defined, otherwise `null`
     */
    getRules() {
        return this._rules;

@@ -129,7 +112,7 @@ export default class ReplicationConfiguration {

    /**
     * Get the replication configuration
     * @return - The replication configuration
     * @return {object} - The replication configuration
     */
    getReplicationConfiguration() {
        return {

@@ -142,22 +125,18 @@ export default class ReplicationConfiguration {

    /**
     * Build the rule object from the parsed XML of the given rule
     * @param rule - The rule object from this._parsedXML
     * @return - The rule object to push into the `Rules` array
     * @param {object} rule - The rule object from this._parsedXML
     * @return {object} - The rule object to push into the `Rules` array
     */
    _buildRuleObject(rule: XMLRule) {
        const base = {
            id: '',
    _buildRuleObject(rule) {
        const obj = {
            prefix: rule.Prefix[0],
            enabled: rule.Status[0] === 'Enabled',
        };
        const obj: Rule = { ...base };
        // ID is an optional property, but create one if not provided or is ''.
        // We generate a 48-character alphanumeric, unique ID for the rule.
        obj.id =
            rule.ID && rule.ID[0] !== ''
                ? rule.ID[0]
                : Buffer.from(UUID.v4()).toString('base64');
        obj.id = rule.ID && rule.ID[0] !== '' ? rule.ID[0] :
            Buffer.from(UUID.v4()).toString('base64');
        // StorageClass is an optional property.
        if (rule.Destination[0].StorageClass) {
            obj.storageClass = rule.Destination[0].StorageClass[0];

@@ -167,10 +146,10 @@ export default class ReplicationConfiguration {

    /**
     * Check if the Role field of the replication configuration is valid
     * @param ARN - The Role field value provided in the configuration
     * @return `true` if a valid role ARN, `false` otherwise
     * @param {string} ARN - The Role field value provided in the configuration
     * @return {boolean} `true` if a valid role ARN, `false` otherwise
     */
    _isValidRoleARN(ARN: string) {
    _isValidRoleARN(ARN) {
        // AWS accepts a range of values for the Role field. Though this does
        // not encompass all constraints imposed by AWS, we have opted to
        // enforce the following.
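These constraints pair with _parseRole() in the next hunk: with a Scality destination, the Role field must carry exactly two comma-separated IAM role ARNs, and a single ARN otherwise. A value satisfying the two-role form (account ID and role names illustrative):

    const role =
        'arn:aws:iam::123456789012:role/src-resource,' +
        'arn:aws:iam::123456789012:role/dest-resource';
    // role.split(',') yields the two ARNs checked by _isValidRoleARN().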
@@ -187,32 +166,30 @@ export default class ReplicationConfiguration {

    /**
     * Check that the `Role` property of the configuration is valid
     * @return {undefined}
     */
    _parseRole() {
        const parsedRole = this._parsedXML.ReplicationConfiguration.Role;
        if (!parsedRole) {
            return errors.MalformedXML;
        }
        const role: string = parsedRole[0];
        const role = parsedRole[0];
        const rolesArr = role.split(',');
        if (this._hasScalityDestination && rolesArr.length !== 2) {
            return errors.InvalidArgument.customizeDescription(
                'Invalid Role specified in replication configuration: ' +
                'Role must be a comma-separated list of two IAM roles'
            );
                'Role must be a comma-separated list of two IAM roles');
        }
        if (!this._hasScalityDestination && rolesArr.length > 1) {
            return errors.InvalidArgument.customizeDescription(
                'Invalid Role specified in replication configuration: ' +
                'Role may not contain a comma separator'
            );
                'Role may not contain a comma separator');
        }
        const invalidRole = rolesArr.find((r) => !this._isValidRoleARN(r));
        const invalidRole = rolesArr.find(r => !this._isValidRoleARN(r));
        if (invalidRole !== undefined) {
            return errors.InvalidArgument.customizeDescription(
                'Invalid Role specified in replication configuration: ' +
                `'${invalidRole}'`
            );
                `'${invalidRole}'`);
        }
        this._role = role;
        return undefined;

@@ -220,6 +197,7 @@ export default class ReplicationConfiguration {

    /**
     * Check that the `Rules` property array is valid
     * @return {undefined}
     */
    _parseRules() {
        // Note that the XML uses 'Rule' while the config object uses 'Rules'.

@@ -229,8 +207,7 @@ export default class ReplicationConfiguration {
        }
        if (Rule.length > MAX_RULES) {
            return errors.InvalidRequest.customizeDescription(
                'Number of defined replication rules cannot exceed 1000'
            );
                'Number of defined replication rules cannot exceed 1000');
        }
        const err = this._parseEachRule(Rule);
        if (err) {

@@ -241,16 +218,15 @@ export default class ReplicationConfiguration {

    /**
     * Check that each rule in the `Rules` property array is valid
     * @param rules - The rule array from this._parsedXML
     * @param {array} rules - The rule array from this._parsedXML
     * @return {undefined}
     */
    _parseEachRule(rules: XMLRule[]) {
        const rulesArr: Rule[] = [];
    _parseEachRule(rules) {
        const rulesArr = [];
        for (let i = 0; i < rules.length; i++) {
            const err =
                this._parseStatus(rules[i]) ||
                this._parsePrefix(rules[i]) ||
                this._parseID(rules[i]) ||
                this._parseDestination(rules[i]);
                this._parseStatus(rules[i]) || this._parsePrefix(rules[i]) ||
                this._parseID(rules[i]) || this._parseDestination(rules[i]);
            if (err) {
                return err;
            }

@@ -262,9 +238,10 @@ export default class ReplicationConfiguration {

    /**
     * Check that the `Status` property is valid
     * @param rule - The rule object from this._parsedXML
     * @param {object} rule - The rule object from this._parsedXML
     * @return {undefined}
     */
    _parseStatus(rule: XMLRule) {
    _parseStatus(rule) {
        const status = rule.Status && rule.Status[0];
        if (!status || !['Enabled', 'Disabled'].includes(status)) {
            return errors.MalformedXML;

@@ -274,19 +251,18 @@ export default class ReplicationConfiguration {

    /**
     * Check that the `Prefix` property is valid
     * @param rule - The rule object from this._parsedXML
     * @param {object} rule - The rule object from this._parsedXML
     * @return {undefined}
     */
    _parsePrefix(rule: XMLRule) {
    _parsePrefix(rule) {
        const prefix = rule.Prefix && rule.Prefix[0];
        // An empty string prefix should be allowed.
        if (!prefix && prefix !== '') {
            return errors.MalformedXML;
        }
        if (prefix.length > 1024) {
            return errors.InvalidArgument.customizeDescription(
                'Rule prefix ' +
                'cannot be longer than maximum allowed key length of 1024'
            );
            return errors.InvalidArgument.customizeDescription('Rule prefix ' +
                'cannot be longer than maximum allowed key length of 1024');
        }
        // Each Prefix in a list of rules must not overlap. For example, two
        // prefixes 'TaxDocs' and 'TaxDocs/2015' are overlapping. An empty

@@ -294,9 +270,8 @@ export default class ReplicationConfiguration {
        for (let i = 0; i < this._configPrefixes.length; i++) {
            const used = this._configPrefixes[i];
            if (prefix.startsWith(used) || used.startsWith(prefix)) {
                return errors.InvalidRequest.customizeDescription(
                    'Found ' + `overlapping prefixes '${used}' and '${prefix}'`
                );
                return errors.InvalidRequest.customizeDescription('Found ' +
                    `overlapping prefixes '${used}' and '${prefix}'`);
            }
        }
        this._configPrefixes.push(prefix);

@@ -305,20 +280,19 @@ export default class ReplicationConfiguration {

    /**
     * Check that the `ID` property is valid
     * @param rule - The rule object from this._parsedXML
     * @param {object} rule - The rule object from this._parsedXML
     * @return {undefined}
     */
    _parseID(rule: XMLRule) {
    _parseID(rule) {
        const id = rule.ID && rule.ID[0];
        if (id && id.length > RULE_ID_LIMIT) {
            return errors.InvalidArgument.customizeDescription(
                'Rule Id cannot be greater than 255'
            );
            return errors.InvalidArgument
                .customizeDescription('Rule Id cannot be greater than 255');
        }
        // Each ID in a list of rules must be unique.
        if (id && this._configIDs.includes(id)) {
        if (this._configIDs.includes(id)) {
            return errors.InvalidRequest.customizeDescription(
                'Rule Id must be unique'
            );
                'Rule Id must be unique');
        }
        if (id !== undefined) {
            this._configIDs.push(id);

@@ -328,14 +302,15 @@ export default class ReplicationConfiguration {

    /**
     * Check that the `StorageClass` property is valid
     * @param destination - The destination object from this._parsedXML
     * @param {object} destination - The destination object from this._parsedXML
     * @return {undefined}
     */
    _parseStorageClass(destination: Destination) {
    _parseStorageClass(destination) {
        const { replicationEndpoints } = this._config;
        // The only condition where the default endpoint is possibly undefined
        // is if there is only a single replication endpoint.
        const defaultEndpoint =
            replicationEndpoints.find((endpoint: any) => endpoint.default) ||
            replicationEndpoints.find(endpoint => endpoint.default) ||
            replicationEndpoints[0];
        // StorageClass is optional.
        if (destination.StorageClass === undefined) {

@@ -357,15 +332,9 @@ export default class ReplicationConfiguration {
                defaultEndpoint.type === undefined;
            return true;
        }
        const endpoint = replicationEndpoints.find(
            (endpoint: any) => endpoint.site === storageClass
        );
        const endpoint = replicationEndpoints.find(endpoint =>
            endpoint.site === storageClass);
        if (endpoint) {
            // We do not support replication to cold location.
            // Only transition to cold location is supported.
            if (endpoint.site && this._config.locationConstraints[endpoint.site]?.isCold) {
                return false;
            }
            // If this._hasScalityDestination was not set to true in any
            // previous iteration or by a prior rule's storage class, then
            // check if the current endpoint is a Scality destination.

@@ -386,9 +355,10 @@ export default class ReplicationConfiguration {

    /**
     * Check that the `Bucket` property is valid
     * @param destination - The destination object from this._parsedXML
     * @param {object} destination - The destination object from this._parsedXML
     * @return {undefined}
     */
    _parseBucket(destination: Destination) {
    _parseBucket(destination) {
        const parsedBucketARN = destination.Bucket;
        // If there is no Scality destination, we get the destination bucket
        // from the location configuration.

@@ -401,8 +371,7 @@ export default class ReplicationConfiguration {
        const bucketARN = parsedBucketARN[0];
        if (!bucketARN) {
            return errors.InvalidArgument.customizeDescription(
                'Destination bucket cannot be null or empty'
            );
                'Destination bucket cannot be null or empty');
        }
        const arr = bucketARN.split(':');
        const isValidARN =

@@ -412,20 +381,17 @@ export default class ReplicationConfiguration {
            arr[3] === '' &&
            arr[4] === '';
        if (!isValidARN) {
            return errors.InvalidArgument.customizeDescription(
                'Invalid bucket ARN'
            );
            return errors.InvalidArgument
                .customizeDescription('Invalid bucket ARN');
        }
        if (!isValidBucketName(arr[5], [])) {
            return errors.InvalidArgument.customizeDescription(
                'The specified bucket is not valid'
            );
            return errors.InvalidArgument
                .customizeDescription('The specified bucket is not valid');
        }
        // We can replicate objects only to one destination bucket.
        if (this._destination && this._destination !== bucketARN) {
            return errors.InvalidRequest.customizeDescription(
                'The destination bucket must be same for all rules'
            );
                'The destination bucket must be same for all rules');
        }
        this._destination = bucketARN;
        return undefined;

@@ -433,9 +399,10 @@ export default class ReplicationConfiguration {

    /**
     * Check that the `destination` property is valid
     * @param rule - The rule object from this._parsedXML
     * @param {object} rule - The rule object from this._parsedXML
     * @return {undefined}
     */
    _parseDestination(rule: XMLRule) {
    _parseDestination(rule) {
        const dest = rule.Destination && rule.Destination[0];
        if (!dest) {
            return errors.MalformedXML;

@@ -449,6 +416,7 @@ export default class ReplicationConfiguration {

    /**
     * Check that the request configuration is valid
     * @return {undefined}
     */
    parseConfiguration() {
        const err = this._parseRules();

@@ -460,62 +428,48 @@ export default class ReplicationConfiguration {

    /**
     * Get the XML representation of the configuration object
     * @param config - The bucket replication configuration
     * @return - The XML representation of the configuration
     * @param {object} config - The bucket replication configuration
     * @return {string} - The XML representation of the configuration
     */
    static getConfigXML(config: {
        role: string;
        destination: string;
        rules: Rule[];
    }) {
    static getConfigXML(config) {
        const { role, destination, rules } = config;
        const Role = `<Role>${escapeForXml(role)}</Role>`;
        const Bucket = `<Bucket>${escapeForXml(destination)}</Bucket>`;
        const rulesXML = rules
            .map((rule) => {
        const rulesXML = rules.map(rule => {
            const { prefix, enabled, storageClass, id } = rule;
            const Prefix =
                prefix === ''
                    ? '<Prefix/>'
                    : `<Prefix>${escapeForXml(prefix)}</Prefix>`;
            const Status = `<Status>${
                enabled ? 'Enabled' : 'Disabled'
            }</Status>`;
            const StorageClass = storageClass
                ? `<StorageClass>${storageClass}</StorageClass>`
                : '';
            const Destination = `<Destination>${Bucket}${StorageClass}</Destination>`;
            const Prefix = prefix === '' ? '<Prefix/>' :
                `<Prefix>${escapeForXml(prefix)}</Prefix>`;
            const Status =
                `<Status>${enabled ? 'Enabled' : 'Disabled'}</Status>`;
            const StorageClass = storageClass ?
                `<StorageClass>${storageClass}</StorageClass>` : '';
            const Destination =
                `<Destination>${Bucket}${StorageClass}</Destination>`;
            // If the ID property was omitted in the configuration object, we
            // create an ID for the rule. Hence it is always defined.
            const ID = `<ID>${escapeForXml(id)}</ID>`;
            return `<Rule>${ID}${Prefix}${Status}${Destination}</Rule>`;
        })
        .join('');
        return (
            '<?xml version="1.0" encoding="UTF-8"?>' +
        }).join('');
        return '<?xml version="1.0" encoding="UTF-8"?>' +
            '<ReplicationConfiguration ' +
            'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
            `${rulesXML}${Role}` +
            '</ReplicationConfiguration>'
        );
            '</ReplicationConfiguration>';
    }

    /**
     * Validate the bucket metadata replication configuration structure and
     * value types
     * @param config - The replication configuration to validate
     * @param {object} config - The replication configuration to validate
     * @return {undefined}
     */
    static validateConfig(config: {
        role: string;
        destination: string;
        rules: Rule[];
    }) {
    static validateConfig(config) {
        assert.strictEqual(typeof config, 'object');
        const { role, rules, destination } = config;
        assert.strictEqual(typeof role, 'string');
        assert.strictEqual(typeof destination, 'string');
        assert.strictEqual(Array.isArray(rules), true);
        rules.forEach((rule) => {
        rules.forEach(rule => {
            assert.strictEqual(typeof rule, 'object');
            const { prefix, enabled, id, storageClass } = rule;
            assert.strictEqual(typeof prefix, 'string');
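Both variants of getConfigXML() above serialize to the same document; a sketch of the call (ARNs illustrative; in practice the rule id is the base64 value generated in _buildRuleObject()):

    const xml = ReplicationConfiguration.getConfigXML({
        role: 'arn:aws:iam::123456789012:role/replication',
        destination: 'arn:aws:s3:::destination-bucket',
        rules: [{ id: 'rule-id-1', prefix: '', enabled: true }],
    });
    // An empty prefix serializes to the self-closing <Prefix/> element.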
@@ -1,41 +1,25 @@
/**
 * @param protocol - protocol to use for redirect
 * @param hostName - hostname to use for redirect
 * @param replaceKeyPrefixWith - string to replace keyPrefixEquals specified in condition
 * @param replaceKeyWith - string to replace key
 * @param httpRedirectCode - http redirect code
 */
export type Redirect = {
    protocol?: string;
    hostName?: string;
    replaceKeyPrefixWith?: string;
    replaceKeyWith?: string;
    httpRedirectCode: string;
};

/**
 * @param keyPrefixEquals - key prefix that triggers a redirect
 * @param httpErrorCodeReturnedEquals - http code that triggers a redirect
 */
export type Condition = {
    keyPrefixEquals?: string;
    httpErrorCodeReturnedEquals?: string;
};

export type RoutingRuleParams = { redirect: Redirect; condition?: Condition };

export class RoutingRule {
    _redirect?: Redirect;
    _condition?: Condition;
    _redirect;
    _condition;

    /**
     * Represents a routing rule in a website configuration.
     * @constructor
     * @param params - object containing redirect and condition objects
     * @param params.redirect - specifies how to redirect requests
     * @param [params.condition] - specifies conditions for a redirect
     * @param {object} params - object containing redirect and condition objects
     * @param {object} params.redirect - specifies how to redirect requests
     * @param {string} [params.redirect.protocol] - protocol to use for redirect
     * @param {string} [params.redirect.hostName] - hostname to use for redirect
     * @param {string} [params.redirect.replaceKeyPrefixWith] - string to replace
     * keyPrefixEquals specified in condition
     * @param {string} [params.redirect.replaceKeyWith] - string to replace key
     * @param {string} [params.redirect.httpRedirectCode] - http redirect code
     * @param {object} [params.condition] - specifies conditions for a redirect
     * @param {string} [params.condition.keyPrefixEquals] - key prefix that
     * triggers a redirect
     * @param {string} [params.condition.httpErrorCodeReturnedEquals] - http code
     * that triggers a redirect
     */
    constructor(params?: RoutingRuleParams) {
    constructor(params) {
        if (params) {
            this._redirect = params.redirect;
            this._condition = params.condition;

@@ -44,7 +28,7 @@ export class RoutingRule {

    /**
     * Return copy of rule as plain object
     * @return rule;
     * @return {object} rule;
     */
    getRuleObject() {
        const rule = {

@@ -56,7 +40,7 @@ export class RoutingRule {

    /**
     * Return the condition object
     * @return condition;
     * @return {object} condition;
     */
    getCondition() {
        return this._condition;

@@ -64,45 +48,36 @@ export class RoutingRule {

    /**
     * Return the redirect object
     * @return redirect;
     * @return {object} redirect;
     */
    getRedirect() {
        return this._redirect;
    }
}

export type RedirectAllRequestsTo = {
    hostName: string;
    protocol?: string;
};
export class WebsiteConfiguration {
    _indexDocument?: string;
    _errorDocument?: string;
    _redirectAllRequestsTo?: RedirectAllRequestsTo;
    _routingRules?: RoutingRule[];
    _indexDocument;
    _errorDocument;
    _redirectAllRequestsTo;
    _routingRules;

    /**
     * Object that represents website configuration
     * @constructor
     * @param params - object containing params to construct Object
     * @param params.indexDocument - key for index document object
     * @param {object} params - object containing params to construct Object
     * @param {string} params.indexDocument - key for index document object
     * required when redirectAllRequestsTo is undefined
     * @param [params.errorDocument] - key for error document object
     * @param params.redirectAllRequestsTo - object containing info
     * @param {string} [params.errorDocument] - key for error document object
     * @param {object} params.redirectAllRequestsTo - object containing info
     * about how to redirect all requests
     * @param params.redirectAllRequestsTo.hostName - hostName to use
     * @param {string} params.redirectAllRequestsTo.hostName - hostName to use
     * when redirecting all requests
     * @param [params.redirectAllRequestsTo.protocol] - protocol to use
     * @param {string} [params.redirectAllRequestsTo.protocol] - protocol to use
     * when redirecting all requests ('http' or 'https')
     * @param params.routingRules - array of Routing
     * @param {(RoutingRule[]|object[])} params.routingRules - array of Routing
     * Rule instances or plain routing rule objects to cast as RoutingRule's
     */
    constructor(params: {
        indexDocument: string;
        errorDocument: string;
        redirectAllRequestsTo: RedirectAllRequestsTo;
        routingRules: RoutingRule[] | any[],
    }) {
    constructor(params) {
        if (params) {
            this._indexDocument = params.indexDocument;
            this._errorDocument = params.errorDocument;

@@ -113,34 +88,35 @@ export class WebsiteConfiguration {

    /**
     * Return plain object with configuration info
     * @return - Object copy of class instance
     * @return {object} - Object copy of class instance
     */
    getConfig() {
        const base = {
        const websiteConfig = {
            indexDocument: this._indexDocument,
            errorDocument: this._errorDocument,
            redirectAllRequestsTo: this._redirectAllRequestsTo,
        };
        if (this._routingRules) {
            const routingRules = this._routingRules.map(r => r.getRuleObject());
            return { ...base, routingRules };
            websiteConfig.routingRules =
                this._routingRules.map(rule => rule.getRuleObject());
        }
        return { ...base };
        return websiteConfig;
    }

    /**
     * Set the redirectAllRequestsTo
     * @param obj - object to set as redirectAllRequestsTo
     * @param obj.hostName - hostname for redirecting all requests
     * @param [obj.protocol] - protocol for redirecting all requests
     * @param {object} obj - object to set as redirectAllRequestsTo
     * @param {string} obj.hostName - hostname for redirecting all requests
     * @param {object} [obj.protocol] - protocol for redirecting all requests
     * @return {undefined};
     */
    setRedirectAllRequestsTo(obj: { hostName: string; protocol?: string }) {
    setRedirectAllRequestsTo(obj) {
        this._redirectAllRequestsTo = obj;
    }

    /**
     * Return the redirectAllRequestsTo object
     * @return redirectAllRequestsTo;
     * @return {object} redirectAllRequestsTo;
     */
    getRedirectAllRequestsTo() {
        return this._redirectAllRequestsTo;

@@ -148,15 +124,16 @@ export class WebsiteConfiguration {

    /**
     * Set the index document object name
     * @param suffix - index document object key
     * @param {string} suffix - index document object key
     * @return {undefined};
     */
    setIndexDocument(suffix: string) {
    setIndexDocument(suffix) {
        this._indexDocument = suffix;
    }

    /**
     * Get the index document object name
     * @return indexDocument
     * @return {string} indexDocument
     */
    getIndexDocument() {
        return this._indexDocument;

@@ -164,15 +141,16 @@ export class WebsiteConfiguration {

    /**
     * Set the error document object name
     * @param key - error document object key
     * @param {string} key - error document object key
     * @return {undefined};
     */
    setErrorDocument(key: string) {
    setErrorDocument(key) {
        this._errorDocument = key;
    }

    /**
     * Get the error document object name
     * @return errorDocument
     * @return {string} errorDocument
     */
    getErrorDocument() {
        return this._errorDocument;

@@ -180,9 +158,10 @@ export class WebsiteConfiguration {

    /**
     * Set the whole RoutingRules array
     * @param array - array to set as instance's RoutingRules
     * @param {array} array - array to set as instance's RoutingRules
     * @return {undefined};
     */
    setRoutingRules(array?: (RoutingRule | RoutingRuleParams)[]) {
    setRoutingRules(array) {
        if (array) {
            this._routingRules = array.map(rule => {
                if (rule instanceof RoutingRule) {

@@ -195,9 +174,10 @@ export class WebsiteConfiguration {

    /**
     * Add a RoutingRule instance to routingRules array
     * @param obj - rule to add to array
     * @param {object} obj - rule to add to array
     * @return {undefined};
     */
    addRoutingRule(obj?: RoutingRule | RoutingRuleParams) {
    addRoutingRule(obj) {
        if (!this._routingRules) {
            this._routingRules = [];
        }

@@ -210,7 +190,7 @@ export class WebsiteConfiguration {

    /**
     * Get routing rules
     * @return - array of RoutingRule instances
     * @return {RoutingRule[]} - array of RoutingRule instances
     */
    getRoutingRules() {
        return this._routingRules;
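A sketch tying the two classes together, using the looser JS-side constructor signature (keys and hostnames illustrative):

    const config = new WebsiteConfiguration({
        indexDocument: 'index.html',
        errorDocument: 'error.html',
    });
    config.addRoutingRule(new RoutingRule({
        redirect: { hostName: 'example.com', httpRedirectCode: '301' },
        condition: { keyPrefixEquals: 'docs/' },
    }));
    const plain = config.getConfig(); // rules exported via getRuleObject()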
@@ -1,16 +1,14 @@
export { default as ARN } from './ARN';
export { default as BackendInfo } from './BackendInfo';
export { default as BucketAzureInfo } from './BucketAzureInfo';
export { default as BucketInfo } from './BucketInfo';
export { default as BucketPolicy } from './BucketPolicy';
export { default as BucketAzureInfo } from './BucketAzureInfo';
export { default as ObjectMD } from './ObjectMD';
export { default as ObjectMDLocation } from './ObjectMDLocation';
export { default as ObjectMDAzureInfo } from './ObjectMDAzureInfo';
export { default as ARN } from './ARN';
export * as WebsiteConfiguration from './WebsiteConfiguration';
export { default as ReplicationConfiguration } from './ReplicationConfiguration';
export { default as LifecycleConfiguration } from './LifecycleConfiguration';
export { default as LifecycleRule } from './LifecycleRule';
export { default as NotificationConfiguration } from './NotificationConfiguration';
export { default as BucketPolicy } from './BucketPolicy';
export { default as ObjectLockConfiguration } from './ObjectLockConfiguration';
export { default as ObjectMD } from './ObjectMD';
export { default as ObjectMDAmzRestore } from './ObjectMDAmzRestore';
export { default as ObjectMDArchive } from './ObjectMDArchive';
export { default as ObjectMDAzureInfo } from './ObjectMDAzureInfo';
export { default as ObjectMDLocation } from './ObjectMDLocation';
export { default as ReplicationConfiguration } from './ReplicationConfiguration';
export * as WebsiteConfiguration from './WebsiteConfiguration';
export { default as NotificationConfiguration } from './NotificationConfiguration';
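Either ordering of this barrel file exposes the same named exports; a typical consumer import (the relative path is illustrative):

    import { ObjectMD, ObjectMDLocation, ReplicationConfiguration } from './models';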
@@ -1,13 +1,12 @@
import { Logger } from 'werelogs';

const DEFAULT_STICKY_COUNT = 100;

/**
 * Shuffle an array in-place
 *
 * @param array - The array to shuffle
 * @param {Array} array - The array to shuffle
 * @return {undefined}
 */
function shuffle(array: any[]) {
function shuffle(array) {
    for (let i = array.length - 1; i > 0; i--) {
        const randIndex = Math.floor(Math.random() * (i + 1));
        /* eslint-disable no-param-reassign */

@@ -19,13 +18,6 @@ function shuffle(array: any[]) {
}

export default class RoundRobin {
    logger?: Logger;
    stickyCount: number;
    defaultPort?: number;
    hostsList: { host: string; port?: number }[]
    hostIndex: number;
    pickCount: number;

    /**
     * @constructor
     * @param {object[]|string[]} hostsList - list of hosts to query

@@ -37,10 +29,7 @@ export default class RoundRobin {
     * to send to the same host before switching to the next one
     * @param {Logger} [options.logger] - logger object
     */
    constructor(
        hostsList: { host: string; port: number }[] | string[],
        options?: { stickyCount?: number; logger?: Logger; defaultPort?: string },
    ) {
    constructor(hostsList, options) {
        if (hostsList.length === 0) {
            throw new Error(
                'at least one host must be provided for round robin');

@@ -55,12 +44,9 @@ export default class RoundRobin {
        }
        if (options && options.defaultPort) {
            this.defaultPort = Number.parseInt(options.defaultPort, 10);
            if (isNaN(this.defaultPort)) {
                this.defaultPort = undefined;
            }
        }

        this.hostsList = hostsList.map((item: any) => this._validateHostObj(item));
        this.hostsList = hostsList.map(item => this._validateHostObj(item));

        // TODO: add blacklisting capability

@@ -69,8 +55,8 @@ export default class RoundRobin {
        this.pickCount = 0;
    }

    _validateHostObj(hostItem: string | { host: string; port: string }): { host: string; port?: number } {
        const hostItemObj = { host: '', port: '' };
    _validateHostObj(hostItem) {
        const hostItemObj = {};

        if (typeof hostItem === 'string') {
            const hostParts = hostItem.split(':');

@@ -120,7 +106,7 @@ export default class RoundRobin {
     * Once all hosts have been returned once, the list is shuffled
     * and a new round-robin cycle starts.
     *
     * @return a host object with { host, port } attributes
     * @return {object} a host object with { host, port } attributes
     */
    pickHost() {
        if (this.logger) {

@@ -145,7 +131,7 @@ export default class RoundRobin {
     * Once all hosts have been returned once, the list is shuffled
     * and a new round-robin cycle starts.
     *
     * @return a host object with { host, port } attributes
     * @return {object} a host object with { host, port } attributes
     */
    pickNextHost() {
        // don't shuffle in this case because we want to force picking

@@ -159,13 +145,13 @@ export default class RoundRobin {
     * return the current host in round-robin, without changing the
     * round-robin state
     *
     * @return a host object with { host, port } attributes
     * @return {object} a host object with { host, port } attributes
     */
    getCurrentHost() {
        return this.hostsList[this.hostIndex];
    }

    _roundRobinCurrentHost(params: { shuffle?: boolean }) {
    _roundRobinCurrentHost(params) {
        this.hostIndex += 1;
        if (this.hostIndex === this.hostsList.length) {
            this.hostIndex = 0;
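A sketch of the documented behavior (hosts illustrative): pickHost() keeps returning the same host until the sticky count of picks has elapsed, pickNextHost() advances immediately, and getCurrentHost() never mutates state:

    const rr = new RoundRobin(['server1:8000', 'server2:8000'],
        { stickyCount: 10 });
    const first = rr.pickHost();      // e.g. { host: 'server1', port: 8000 }
    const same = rr.getCurrentHost(); // same host until the sticky count runs out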
@@ -1,46 +1,20 @@
import * as http from 'http';
import * as https from 'https';
import { https as HttpsAgent } from 'httpagent';
import * as tls from 'tls';
import * as net from 'net';
import assert from 'assert';
import { dhparam } from '../../https/dh2048';
import { ciphers } from '../../https/ciphers';
import errors from '../../errors';
import { checkSupportIPv6 } from './utils';
import { Logger } from 'werelogs';


export default class Server {
    _noDelay: boolean;
    _cbOnListening: () => void;
    _cbOnRequest: (req: http.IncomingMessage, res: http.ServerResponse) => void;
    _cbOnCheckContinue: (req: http.IncomingMessage, res: http.ServerResponse) => void;
    _cbOnCheckExpectation: (req: http.IncomingMessage, res: http.ServerResponse) => void;
    _cbOnError: (err: Error) => boolean;
    _cbOnStop: () => void;
    _https: {
        agent?: https.Agent;
        ciphers: string;
        dhparam: string;
        cert?: string;
        key?: string;
        ca?: string[];
        requestCert: boolean;
        rejectUnauthorized: boolean;
    };
    _port: number;
    _address: string;
    _server: http.Server | https.Server | null;
    _logger: Logger;
    _keepAliveTimeout: number | null;

    /**
     * @constructor
     *
     * @param port - Port to listen into
     * @param logger - Logger object
     * @param {number} port - Port to listen into
     * @param {werelogs.Logger} logger - Logger object
     */
    constructor(port: number, logger: Logger) {
    constructor(port, logger) {
        assert.strictEqual(typeof port, 'number', 'Port must be a number');
        this._noDelay = true;
        this._cbOnListening = () => {};

@@ -57,6 +31,9 @@ export default class Server {
        this._https = {
            ciphers,
            dhparam,
            cert: null,
            key: null,
            ca: null,
            requestCert: false,
            rejectUnauthorized: true,
        };

@@ -71,10 +48,10 @@ export default class Server {
     * Setter to noDelay, this disable the nagle tcp algorithm, reducing
     * latency for each request
     *
     * @param value - { true: Disable, false: Enable }
     * @return itself
     * @param {boolean} value - { true: Disable, false: Enable }
     * @return {Server} itself
     */
    setNoDelay(value: boolean) {
    setNoDelay(value) {
        this._noDelay = value;
        return this;
    }

@@ -84,10 +61,10 @@ export default class Server {
     * connections are automatically closed (default should be
     * 5 seconds in node.js)
     *
     * @param keepAliveTimeout - keep-alive timeout in milliseconds
     * @return - returns this
     * @param {number} keepAliveTimeout - keep-alive timeout in milliseconds
     * @return {Server} - returns this
     */
    setKeepAliveTimeout(keepAliveTimeout: number) {
    setKeepAliveTimeout(keepAliveTimeout) {
        this._keepAliveTimeout = keepAliveTimeout;
        return this;
    }

@@ -95,7 +72,7 @@ export default class Server {
    /**
     * Getter to access to the http/https server
     *
     * @return http/https server
     * @return {http.Server|https.Server} http/https server
     */
    getServer() {
        return this._server;

@@ -104,7 +81,7 @@ export default class Server {
    /**
     * Getter to access to the current authority certificate
     *
     * @return Authority certificate
     * @return {string} Authority certificate
     */
    getAuthorityCertificate() {
        return this._https.ca;

@@ -113,16 +90,17 @@ export default class Server {
    /**
     * Setter to the listening port
     *
     * @param port - Port to listen into
     * @param {number} port - Port to listen into
     * @return {undefined}
     */
    setPort(port: number) {
    setPort(port) {
        this._port = port;
    }

    /**
     * Getter to access to the listening port
     *
     * @return listening port
     * @return {number} listening port
     */
    getPort() {
        return this._port;

@@ -131,16 +109,17 @@ export default class Server {
    /**
     * Setter to the bind address
     *
     * @param address - address bound to the socket
     * @param {String} address - address bound to the socket
     * @return {undefined}
     */
    setBindAddress(address: string) {
    setBindAddress(address) {
        this._address = address;
    }

    /**
     * Getter to access the bind address
     *
     * @return address bound to the socket
     * @return {String} address bound to the socket
     */
    getBindAddress() {
        return this._address;

@@ -149,7 +128,7 @@ export default class Server {
    /**
     * Getter to access to the noDelay (nagle algorithm) configuration
     *
     * @return - { true: Disable, false: Enable }
     * @return {boolean} { true: Disable, false: Enable }
     */
    isNoDelay() {
        return this._noDelay;

@@ -158,7 +137,7 @@ export default class Server {
    /**
     * Getter to know if the server run under https or http
     *
     * @return - { true: Https server, false: http server }
     * @return {boolean} { true: Https server, false: http server }
     */
    isHttps() {
        return !!this._https.cert && !!this._https.key;

@@ -167,17 +146,20 @@ export default class Server {
    /**
     * Setter for the https configuration
     *
     * @param [cert] - Content of the certificate
     * @param [key] - Content of the key
     * @param [ca] - Content of the authority certificate
     * @param [twoWay] - Enable the two way exchange, which means
     * @param {string} [cert] - Content of the certificate
     * @param {string} [key] - Content of the key
     * @param {string} [ca] - Content of the authority certificate
     * @param {boolean} [twoWay] - Enable the two way exchange, which means
     * each client needs to set up an ssl certificate
     * @return itself
     * @return {Server} itself
     */
    setHttps(cert: string, key: string, ca: string, twoWay: boolean) {
    setHttps(cert, key, ca, twoWay) {
        this._https = {
            ciphers,
            dhparam,
            cert: null,
            key: null,
            ca: null,
            requestCert: false,
            rejectUnauthorized: true,
        };

@@ -201,10 +183,11 @@ export default class Server {
    /**
     * Function called when no handler specified in the server
     *
     * @param _req - Request object
     * @param res - Response object
     * @param {http.IncomingMessage|https.IncomingMessage} req - Request object
     * @param {http.ServerResponse} res - Response object
     * @return {undefined}
     */
    _noHandlerCb(_req: http.IncomingMessage, res: http.ServerResponse) {
    _noHandlerCb(req, res) {
        // if no handler on the Server, send back an internal error
        const err = errors.InternalError;
        const msg = `${err.message}: No handler in Server`;

@@ -218,18 +201,23 @@ export default class Server {
    /**
     * Function called when request received
     *
     * @param req - Request object
     * @param res - Response object
     * @param {http.IncomingMessage} req - Request object
     * @param {http.ServerResponse} res - Response object
     * @return {undefined}
     */
    _onRequest(req: http.IncomingMessage, res: http.ServerResponse) {
    _onRequest(req, res) {
        return this._cbOnRequest(req, res);
    }

    /** Function called when the Server is listening */
    /**
     * Function called when the Server is listening
     *
     * @return {undefined}
     */
    _onListening() {
        this._logger.info('Server is listening', {
            method: 'arsenal.network.Server._onListening',
            address: this._server?.address(),
            address: this._server.address(),
            serverIP: this._address,
            serverPort: this._port,
        });

@@ -239,10 +227,10 @@ export default class Server {
    /**
     * Function called when the Server sends back an error
     *
     * @param err - Error to be sent back
     * @return
     * @param {Error} err - Error to be sent back
     * @return {undefined}
     */
    _onError(err: Error) {
    _onError(err) {
        this._logger.error('Server error', {
            method: 'arsenal.network.Server._onError',
            port: this._port,

@@ -255,9 +243,13 @@ export default class Server {
        }
    }

    /** Function called when the Server is stopped */
    /**
     * Function called when the Server is stopped
     *
     * @return {undefined}
     */
    _onClose() {
        if (this._server?.listening) {
        if (this._server.listening) {
            this._logger.info('Server is stopped', {
                address: this._server.address(),
            });

@@ -269,10 +261,10 @@ export default class Server {
    /**
     * Set the listening callback
     *
     * @param cb - Callback()
     * @return itself
     * @param {function} cb - Callback()
     * @return {Server} itself
     */
    onListening(cb: () => void) {
    onListening(cb) {
        assert.strictEqual(typeof cb, 'function',
            'Callback must be a function');
        this._cbOnListening = cb;

@@ -282,10 +274,10 @@ export default class Server {
    /**
     * Set the request handler callback
     *
     * @param cb - Callback(req, res)
     * @return itself
     * @param {function} cb - Callback(req, res)
     * @return {Server} itself
     */
    onRequest(cb: (req: http.IncomingMessage, res: http.ServerResponse) => void) {
    onRequest(cb) {
        assert.strictEqual(typeof cb, 'function',
            'Callback must be a function');
        this._cbOnRequest = cb;

@@ -295,10 +287,10 @@ export default class Server {
    /**
     * Set the checkExpectation handler callback
     *
     * @param cb - Callback(req, res)
     * @return itself
     * @param {function} cb - Callback(req, res)
     * @return {Server} itself
     */
    onCheckExpectation(cb: (req: http.IncomingMessage, res: http.ServerResponse) => void) {
    onCheckExpectation(cb) {
        assert.strictEqual(typeof cb, 'function',
            'Callback must be a function');
        this._cbOnCheckExpectation = cb;

@@ -308,10 +300,10 @@ export default class Server {
    /**
     * Set the checkContinue handler callback
     *
     * @param cb - Callback(req, res)
     * @return itself
     * @param {function} cb - Callback(req, res)
     * @return {Server} itself
     */
    onCheckContinue(cb: (req: http.IncomingMessage, res: http.ServerResponse) => void) {
    onCheckContinue(cb) {
        assert.strictEqual(typeof cb, 'function',
            'Callback must be a function');
        this._cbOnCheckContinue = cb;

@@ -322,10 +314,10 @@ export default class Server {
     * Set the error handler callback, if this handler returns true when an
     * error is triggered, the server will restart
     *
     * @param cb - Callback(err)
     * @return itself
     * @param {function} cb - Callback(err)
     * @return {Server} itself
     */
    onError(cb: (err: Error) => boolean) {
    onError(cb) {
        assert.strictEqual(typeof cb, 'function',
            'Callback must be a function');
        this._cbOnError = cb;

@@ -335,10 +327,10 @@ export default class Server {
    /**
     * Set the stop handler callback
     *
     * @param cb - Callback()
     * @return itself
     * @param {function} cb - Callback()
     * @return {Server} itself
|
||||
*/
|
||||
onStop(cb: () => void) {
|
||||
onStop(cb) {
|
||||
assert.strictEqual(typeof cb, 'function',
|
||||
'Callback must be a function');
|
||||
this._cbOnStop = cb;
|
||||
|
@ -348,9 +340,10 @@ export default class Server {
|
|||
/**
|
||||
* Function called when a secure connection is etablished
|
||||
*
|
||||
* @param sock - socket
|
||||
* @param {tls.TlsSocket} sock - socket
|
||||
* @return {undefined}
|
||||
*/
|
||||
_onSecureConnection(sock: tls.TLSSocket) {
|
||||
_onSecureConnection(sock) {
|
||||
if (this._https.requestCert && !sock.authorized) {
|
||||
this._logger.error('rejected secure connection', {
|
||||
address: sock.address(),
|
||||
|
@ -363,10 +356,11 @@ export default class Server {
|
|||
/**
|
||||
* function called when an error came from the client request
|
||||
*
|
||||
* @param err - Error
|
||||
* @param sock - Socket
|
||||
* @param {Error} err - Error
|
||||
* @param {net.Socket|tls.TlsSocket} sock - Socket
|
||||
* @return {undefined}
|
||||
*/
|
||||
_onClientError(err: Error, sock: net.Socket | tls.TLSSocket) {
|
||||
_onClientError(err, sock) {
|
||||
this._logger.error('client error', {
|
||||
method: 'arsenal.network.Server._onClientError',
|
||||
error: err.stack || err,
|
||||
|
@ -380,10 +374,11 @@ export default class Server {
|
|||
* Function called when request with an HTTP Expect header is received,
|
||||
* where the value is not 100-continue
|
||||
*
|
||||
* @param req - Request object
|
||||
* @param res - Response object
|
||||
* @param {http.IncomingMessage|https.IncomingMessage} req - Request object
|
||||
* @param {http.ServerResponse} res - Response object
|
||||
* @return {undefined}
|
||||
*/
|
||||
_onCheckExpectation(req: http.IncomingMessage, res: http.ServerResponse) {
|
||||
_onCheckExpectation(req, res) {
|
||||
return this._cbOnCheckExpectation(req, res);
|
||||
}
|
||||
|
||||
|
@ -391,17 +386,18 @@ export default class Server {
|
|||
* Function called when request with an HTTP Expect: 100-continue
|
||||
* is received
|
||||
*
|
||||
* @param req - Request object
|
||||
* @param res - Response object
|
||||
* @param {http.IncomingMessage|https.IncomingMessage} req - Request object
|
||||
* @param {http.ServerResponse} res - Response object
|
||||
* @return {undefined}
|
||||
*/
|
||||
_onCheckContinue(req: http.IncomingMessage, res: http.ServerResponse) {
|
||||
_onCheckContinue(req, res) {
|
||||
return this._cbOnCheckContinue(req, res);
|
||||
}
|
||||
|
||||
/**
|
||||
* Function to start the Server
|
||||
*
|
||||
* @return itself
|
||||
* @return {Server} itself
|
||||
*/
|
||||
start() {
|
||||
if (!this._server) {
|
||||
|
@ -410,11 +406,7 @@ export default class Server {
|
|||
method: 'arsenal.network.Server.start',
|
||||
port: this._port,
|
||||
});
|
||||
this._https.agent = new HttpsAgent.Agent(this._https, {
|
||||
// Do not enforce the maximum number of sockets for the
|
||||
// main server, as it might be able to serve more clients.
|
||||
maxSockets: false,
|
||||
});
|
||||
this._https.agent = new https.Agent(this._https);
|
||||
this._server = https.createServer(this._https,
|
||||
(req, res) => this._onRequest(req, res));
|
||||
} else {
|
||||
|
@ -434,7 +426,6 @@ export default class Server {
|
|||
sock => this._onSecureConnection(sock));
|
||||
this._server.on('connection', sock => {
|
||||
// Setting no delay of the socket to the value configured
|
||||
// TODO fix this
|
||||
sock.setNoDelay(this.isNoDelay());
|
||||
sock.on('error', err => this._logger.info(
|
||||
'socket error - request rejected', { error: err }));
|
||||
|
@ -442,7 +433,6 @@ export default class Server {
|
|||
this._server.on('tlsClientError', (err, sock) =>
|
||||
this._onClientError(err, sock));
|
||||
this._server.on('clientError', (err, sock) =>
|
||||
// @ts-expect-errors
|
||||
this._onClientError(err, sock));
|
||||
this._server.on('checkContinue', (req, res) =>
|
||||
this._onCheckContinue(req, res));
|
||||
|
@ -457,7 +447,7 @@ export default class Server {
|
|||
/**
|
||||
* Function to stop the Server
|
||||
*
|
||||
* @return itself
|
||||
* @return {Server} itself
|
||||
*/
|
||||
stop() {
|
||||
if (this._server) {
|
||||
|
|
|
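Since the diff above strips the TypeScript signatures, the callback setters are easiest to read through a usage sketch. The following is a minimal, hypothetical wiring of this Server class; the constructor arguments, the werelogs logger, and the cert/key/ca variables are illustrative assumptions, not part of the diff:

    const werelogs = require('werelogs');
    const logger = new werelogs.Logger('ServerExample');
    const server = new Server(8443, logger); // hypothetical constructor arguments

    server.onListening(() => logger.info('listening'))
        .onRequest((req, res) => {
            // each setter returns the Server instance per its JSDoc,
            // hence the chaining
            res.writeHead(200);
            res.end('ok');
        })
        .onError(err => {
            logger.error('server error', { error: err });
            return false; // returning true would make the server restart
        })
        .setHttps(cert, key, ca, false) // PEM strings loaded elsewhere
        .start();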
@@ -1,12 +1,12 @@
import * as os from 'os';
import errors, { ArsenalError } from '../../errors';
import errors from '../../errors';
/**
* Parse the Range header into an object
*
* @param rangeHeader - The 'Range' header value
* @param {String} rangeHeader - The 'Range' header value
* @return object containing a range specification, with
* @return {Object} object containing a range specification, with
* either of:
* - start and end attributes: a fully specified range request
* - a single start attribute: no end is specified in the range request
@@ -14,12 +14,7 @@ import errors, { ArsenalError } from '../../errors';
* - an error attribute of type errors.InvalidArgument if the range
* syntax is invalid
*/
export function parseRangeSpec(
rangeHeader: string
):
| { error: ArsenalError }
| { suffix: number }
| { start: number; end?: number } {
export function parseRangeSpec(rangeHeader) {
const rangeMatch = /^bytes=([0-9]+)?-([0-9]+)?$/.exec(rangeHeader);
if (rangeMatch) {
const rangeValues = rangeMatch.slice(1, 3);
@@ -28,8 +23,7 @@ export function parseRangeSpec(
return { suffix: Number.parseInt(rangeValues[1], 10) };
}
} else {
const rangeSpec: { start: number; end?: number } =
{ start: Number.parseInt(rangeValues[0], 10) };
const rangeSpec = { start: Number.parseInt(rangeValues[0], 10) };
if (rangeValues[1] === undefined) {
return rangeSpec;
}
@@ -46,11 +40,11 @@ export function parseRangeSpec(
* Convert a range specification as given by parseRangeSpec() into a
* fully specified absolute byte range
*
* @param rangeSpec - Parsed range specification as returned
* @param {Number []} rangeSpec - Parsed range specification as returned
* by parseRangeSpec()
* @param objectSize - Total byte size of the whole object
* @param {Number} objectSize - Total byte size of the whole object
* @return object containing either:
* @return {Object} object containing either:
* - a 'range' attribute which is a fully specified byte range [start,
* end], as the inclusive absolute byte range to request from the
* object
@@ -59,11 +53,8 @@ export function parseRangeSpec(
* - or an 'error' attribute of type errors.InvalidRange if the
* requested range is out of object's boundaries.
*/
export function getByteRangeFromSpec(
rangeSpec: { suffix: number } | { start: number; end?: number },
objectSize: number
): { error: ArsenalError } | { range: [number, number] } | {} {
if ('suffix' in rangeSpec) {
export function getByteRangeFromSpec(rangeSpec, objectSize) {
if (rangeSpec.suffix !== undefined) {
if (rangeSpec.suffix === 0) {
// 0-byte suffix is always invalid (even on empty objects)
return { error: errors.InvalidRange };
@@ -77,11 +68,10 @@ export function getByteRangeFromSpec(
objectSize - 1] };
}
if (rangeSpec.start < objectSize) {
// test is false if end is undefined or end is greater than objectSize
const end: number = rangeSpec.end !== undefined && rangeSpec.end < objectSize
? rangeSpec.end
: objectSize - 1;
return { range: [rangeSpec.start, end] };
// test is false if end is undefined
return { range: [rangeSpec.start,
(rangeSpec.end < objectSize ?
rangeSpec.end : objectSize - 1)] };
}
return { error: errors.InvalidRange };
}
@@ -90,10 +80,10 @@ export function getByteRangeFromSpec(
* Convenience function that combines parseRangeSpec() and
* getByteRangeFromSpec()
*
* @param rangeHeader - The 'Range' header value
* @param objectSize - Total byte size of the whole object
* @param {String} rangeHeader - The 'Range' header value
* @param {Number} objectSize - Total byte size of the whole object
* @return object containing either:
* @return {Object} object containing either:
* - a 'range' attribute which is a fully specified byte range [start,
* end], as the inclusive absolute byte range to request from the
* object
@@ -103,12 +93,9 @@ export function getByteRangeFromSpec(
* - or an 'error' attribute instead of type errors.InvalidRange if
* the requested range is out of object's boundaries.
*/
export function parseRange(
rangeHeader: string,
objectSize: number
): { range: [number, number] } | {} | { error: ArsenalError } {
export function parseRange(rangeHeader, objectSize) {
const rangeSpec = parseRangeSpec(rangeHeader);
if ('error' in rangeSpec) {
if (rangeSpec.error) {
// invalid range syntax is silently ignored in HTTP spec,
// hence returns the whole object
return {};
@@ -118,6 +105,6 @@ export function getByteRangeFromSpec(
export function checkSupportIPv6() {
const niList = os.networkInterfaces();
return Object.keys(niList).some((network) =>
niList[network]?.some(intfc => intfc.family === 'IPv6'));
return Object.keys(niList).some(network =>
niList[network].some(intfc => intfc.family === 'IPv6'));
}
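The behaviour of these range helpers is easiest to see on concrete header values. A small hedged walkthrough; the numbers are illustrative and the expected results follow the RFC 7233 semantics the code above implements:

    parseRangeSpec('bytes=0-9');   // => { start: 0, end: 9 }
    parseRangeSpec('bytes=500-');  // => { start: 500 }
    parseRangeSpec('bytes=-500');  // => { suffix: 500 }
    parseRangeSpec('bytes=oops');  // => { error: errors.InvalidArgument }

    // Resolving against a 10000-byte object:
    parseRange('bytes=-500', 10000);   // => { range: [9500, 9999] }
    parseRange('bytes=9500-', 10000);  // => { range: [9500, 9999] }, end clamped
    parseRange('bytes=oops', 10000);   // => {}, bad syntax serves the whole object
    parseRange('bytes=20000-', 10000); // => { error: errors.InvalidRange }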
@@ -1,17 +1,15 @@
import server from './http/server';
import * as utils from './http/utils';
export * as rpc from './rpc/rpc';
export * as level from './rpc/level-net';
import RESTServer from './rest/RESTServer';
import RESTClient from './rest/RESTClient';
export { default as RoundRobin } from './RoundRobin';
import * as ProbeServer from './probe/ProbeServer';
import HealthProbeServer from './probe/HealthProbeServer';
import * as Utils from './probe/Utils';
export * as kmip from './kmip';
export { default as kmipClient } from './kmip/Client';
export const http = { server, utils };
export const http = { server };
export const rest = { RESTServer, RESTClient };
export const probe = { ProbeServer, HealthProbeServer, Utils };
export { default as RoundRobin } from './RoundRobin';
export { default as kmip } from './kmip';
export { default as kmipClient } from './kmip/Client';
export * as rpc from './rpc/rpc';
export * as level from './rpc/level-net';
@@ -1,12 +1,8 @@
'use strict'; // eslint-disable-line
/* eslint new-cap: "off" */
import async from 'async';
import errors from '../../errors';
import TTLVCodec from './codec/ttlv';
import TlsTransport from './transport/tls';
import KMIP from '.';
import * as werelogs from 'werelogs';
const CRYPTOGRAPHIC_OBJECT_TYPE = 'Symmetric Key';
const CRYPTOGRAPHIC_ALGORITHM = 'AES';
@@ -47,10 +43,10 @@ const searchFilter = {
/**
* Normalize errors according to arsenal definitions
* @param err - an Error instance or a message string
* @returns - arsenal error
* @param {string | Error} err - an Error instance or a message string
* @returns {arsenal.errors} - arsenal error
*/
function _arsenalError(err: string | Error) {
function _arsenalError(err) {
const messagePrefix = 'KMIP:';
if (typeof err === 'string') {
return errors.InternalError
@@ -59,7 +55,6 @@ function _arsenalError(err: string | Error) {
err instanceof Error ||
// INFO: The second part is here only for Jest, to remove when we'll be
// fully migrated to TS
// @ts-expect-error
(err && typeof err.message === 'string')
) {
return errors.InternalError
@@ -73,11 +68,12 @@ function _arsenalError(err: string | Error) {
/**
* Negotiate with the server the use of a recent version of the protocol and
* update the low level driver with this new knowledge.
* @param client - The Client instance
* @param logger - Werelog logger object
* @param cb - The callback triggered after the negotiation.
* @param {Object} client - The Client instance
* @param {Object} logger - Werelog logger object
* @param {Function} cb - The callback triggered after the negotiation.
* @returns {undefined}
*/
function _negotiateProtocolVersion(client: any, logger: werelogs.Logger, cb: any) {
function _negotiateProtocolVersion(client, logger, cb) {
return client.kmip.request(logger, 'Discover Versions', [
KMIP.Structure('Protocol Version', [
KMIP.Integer('Protocol Version Major', 1),
@@ -119,11 +115,12 @@ function _negotiateProtocolVersion(client: any, logger: werelogs.Logger, cb: any
/**
* Obtain from the server the various extensions defined by the vendor
* and update the low level driver with this new knowledge.
* @param client - The Client instance
* @param logger - Werelog logger object
* @param cb - The callback triggered after the extension mapping
* @param {Object} client - The Client instance
* @param {Object} logger - Werelog logger object
* @param {Function} cb - The callback triggered after the extension mapping
* @returns {undefined}
*/
function _mapExtensions(client: any, logger: werelogs.Logger, cb: any) {
function _mapExtensions(client, logger, cb) {
return client.kmip.request(logger, 'Query', [
KMIP.Enumeration('Query Function', 'Query Extension Map'),
], (err, response) => {
@@ -152,11 +149,12 @@ function _mapExtensions(client: any, logger: werelogs.Logger, cb: any) {
/**
* Query the Server information and identify its vendor
* @param client - The Client instance
* @param logger - Werelog logger object
* @param cb - The callback triggered after the information discovery
* @param {Object} client - The Client instance
* @param {Object} logger - Werelog logger object
* @param {Function} cb - The callback triggered after the information discovery
* @returns {undefined}
*/
function _queryServerInformation(client: any, logger: werelogs.Logger, cb: any) {
function _queryServerInformation(client, logger, cb) {
client.kmip.request(logger, 'Query', [
KMIP.Enumeration('Query Function', 'Query Server Information'),
], (err, response) => {
@@ -186,11 +184,12 @@ function _queryServerInformation(client: any, logger: werelogs.Logger, cb: any)
* is not a show stopper because some vendors support more or less what they
* announce. If a subsequent request fails, this information can be used to
* figure out the reason for the failure.
* @param client - The Client instance
* @param logger - Werelog logger object
* @param cb - The callback triggered after the information discovery
* @param {Object} client - The Client instance
* @param {Object} logger - Werelog logger object
* @param {Function} cb - The callback triggered after the information discovery
* @returns {undefined}
*/
function _queryOperationsAndObjects(client: any, logger: werelogs.Logger, cb: any) {
function _queryOperationsAndObjects(client, logger, cb) {
return client.kmip.request(logger, 'Query', [
KMIP.Enumeration('Query Function', 'Query Operations'),
KMIP.Enumeration('Query Function', 'Query Objects'),
@@ -238,48 +237,29 @@ function _queryOperationsAndObjects(client: any, logger: werelogs.Logger, cb: an
});
}
export default class Client {
options: any;
vendorIdentification: string;
serverInformation: any[];
kmip: KMIP;
/**
* Construct a high level KMIP driver suitable for cloudserver
* @param options - Instance options
* @param options.kmip - Low level driver options
* @param options.kmip.client - This high level driver options
* @param options.kmip.client.compoundCreateActivate -
* @param {Object} options - Instance options
* @param {Object} options.kmip - Low level driver options
* @param {Object} options.kmip.client - This high level driver options
* @param {Object} options.kmip.client.compoundCreateActivate -
* Depends on the server's ability. False offers the best
* compatibility. True does not offer a significant
* performance gain, but can be useful in case of unreliable
* time synchronization between the client and the server.
* @param options.kmip.client.bucketNameAttributeName -
* @param {Object} options.kmip.client.bucketNameAttributeName -
* Depends on the server's ability. Not specifying this
* offers the best compatibility and disable the attachment
* of the bucket name as a key attribute.
* @param options.kmip.codec - KMIP Codec options
* @param options.kmip.transport - KMIP Transport options
* @param CodecClass - diversion for the Codec class,
* @param {Object} options.kmip.codec - KMIP Codec options
* @param {Object} options.kmip.transport - KMIP Transport options
* @param {Class} CodecClass - diversion for the Codec class,
* defaults to TTLVCodec
* @param TransportClass - diversion for the Transport class,
* @param {Class} TransportClass - diversion for the Transport class,
* defaults to TlsTransport
*/
constructor(
options: {
kmip: {
codec: any;
transport: any;
client: {
compoundCreateActivate: any;
bucketNameAttributeName: any;
};
}
},
CodecClass: any,
TransportClass: any,
) {
constructor(options, CodecClass, TransportClass) {
this.options = options.kmip.client || {};
this.vendorIdentification = '';
this.serverInformation = [];
@@ -293,27 +273,30 @@ export default class Client {
/**
* Update this client with the vendor identification of the server
* @param vendorIdentification - Vendor identification string
* @param {String} vendorIdentification - Vendor identification string
* @returns {undefined}
*/
_setVendorIdentification(vendorIdentification: string) {
_setVendorIdentification(vendorIdentification) {
this.vendorIdentification = vendorIdentification;
}
/**
* Update this client with the information about the server
* @param serverInformation - Server information object
* @param {Object} serverInformation - Server information object
* @returns {undefined}
*/
_setServerInformation(serverInformation: any) {
_setServerInformation(serverInformation) {
this.serverInformation = serverInformation;
}
/**
* Perform the KMIP level handshake with the server
* @param logger - Werelog logger object
* @param cb - Callback to be triggered at the end of the
* @param {Object} logger - Werelog logger object
* @param {Function} cb - Callback to be triggered at the end of the
* handshake. cb(err: Error)
* @returns {undefined}
*/
_kmipHandshake(logger: werelogs.Logger, cb: any) {
_kmipHandshake(logger, cb) {
return async.waterfall([
next => _negotiateProtocolVersion(this, logger, next),
next => _mapExtensions(this, logger, next),
@@ -327,11 +310,12 @@ export default class Client {
* Activate a cryptographic key managed by the server,
* for a specific bucket. This is a required action to perform after
* the key creation.
* @param keyIdentifier - The bucket key Id
* @param logger - Werelog logger object
* @param cb - The callback(err: Error)
* @param {string} keyIdentifier - The bucket key Id
* @param {object} logger - Werelog logger object
* @param {function} cb - The callback(err: Error)
* @returns {undefined}
*/
_activateBucketKey(keyIdentifier: string, logger: werelogs.Logger, cb: any) {
_activateBucketKey(keyIdentifier, logger, cb) {
return this.kmip.request(logger, 'Activate', [
KMIP.TextString('Unique Identifier', keyIdentifier),
], (err, response) => {
@@ -358,12 +342,13 @@ export default class Client {
/**
* Create a new cryptographic key managed by the server,
* for a specific bucket
* @param bucketName - The bucket name
* @param logger - Werelog logger object
* @param cb - The callback(err: Error, bucketKeyId: String)
* @param {string} bucketName - The bucket name
* @param {object} logger - Werelog logger object
* @param {function} cb - The callback(err: Error, bucketKeyId: String)
* @returns {undefined}
*/
createBucketKey(bucketName: string, logger: werelogs.Logger, cb: any) {
const attributes: any = [];
createBucketKey(bucketName, logger, cb) {
const attributes = [];
if (!!this.options.bucketNameAttributeName) {
attributes.push(KMIP.Attribute('TextString',
this.options.bucketNameAttributeName,
@@ -379,8 +364,6 @@ export default class Client {
CRYPTOGRAPHIC_USAGE_MASK))]);
if (this.options.compoundCreateActivate) {
attributes.push(KMIP.Attribute('Date-Time', 'Activation Date',
// TODO What's happening here? That can not work.
// @ts-expect-error
new Date(Date.UTC())));
}
@@ -417,11 +400,12 @@ export default class Client {
* Revoke a cryptographic key managed by the server, for a specific bucket.
* This is a required action to perform before being able to destroy the
* managed key.
* @param bucketKeyId - The bucket key Id
* @param logger - Werelog logger object
* @param cb - The callback(err: Error)
* @param {string} bucketKeyId - The bucket key Id
* @param {object} logger - Werelog logger object
* @param {function} cb - The callback(err: Error)
* @returns {undefined}
*/
_revokeBucketKey(bucketKeyId: string, logger: werelogs.Logger, cb: any) {
_revokeBucketKey(bucketKeyId, logger, cb) {
// maybe revoke first
return this.kmip.request(logger, 'Revoke', [
KMIP.TextString('Unique Identifier', bucketKeyId),
@@ -454,11 +438,12 @@ export default class Client {
/**
* Destroy a cryptographic key managed by the server, for a specific bucket.
* @param bucketKeyId - The bucket key Id
* @param logger - Werelog logger object
* @param cb - The callback(err: Error)
* @param {string} bucketKeyId - The bucket key Id
* @param {object} logger - Werelog logger object
* @param {function} cb - The callback(err: Error)
* @returns {undefined}
*/
destroyBucketKey(bucketKeyId: string, logger: werelogs.Logger, cb: any) {
destroyBucketKey(bucketKeyId, logger, cb) {
return this._revokeBucketKey(bucketKeyId, logger, err => {
if (err) {
const error = _arsenalError(err);
@@ -493,20 +478,19 @@ export default class Client {
/**
*
* @param cryptoScheme - crypto scheme version number
* @param masterKeyId - key to retrieve master key
* @param plainTextDataKey - data key
* @param logger - werelog logger object
* @param cb - callback
* @param {number} cryptoScheme - crypto scheme version number
* @param {string} masterKeyId - key to retrieve master key
* @param {buffer} plainTextDataKey - data key
* @param {object} logger - werelog logger object
* @param {function} cb - callback
* @returns {undefined}
* @callback called with (err, cipheredDataKey: Buffer)
*/
cipherDataKey(
cryptoScheme: number,
masterKeyId: string,
plainTextDataKey: Buffer,
logger: werelogs.Logger,
cb: any,
) {
cipherDataKey(cryptoScheme,
masterKeyId,
plainTextDataKey,
logger,
cb) {
return this.kmip.request(logger, 'Encrypt', [
KMIP.TextString('Unique Identifier', masterKeyId),
KMIP.Structure('Cryptographic Parameters', [
@@ -543,20 +527,19 @@ export default class Client {
/**
*
* @param cryptoScheme - crypto scheme version number
* @param masterKeyId - key to retrieve master key
* @param cipheredDataKey - data key
* @param logger - werelog logger object
* @param cb - callback
* @param {number} cryptoScheme - crypto scheme version number
* @param {string} masterKeyId - key to retrieve master key
* @param {buffer} cipheredDataKey - data key
* @param {object} logger - werelog logger object
* @param {function} cb - callback
* @returns {undefined}
* @callback called with (err, plainTextDataKey: Buffer)
*/
decipherDataKey(
cryptoScheme: number,
masterKeyId: string,
cipheredDataKey: Buffer,
logger: werelogs.Logger,
cb: any,
) {
decipherDataKey(cryptoScheme,
masterKeyId,
cipheredDataKey,
logger,
cb) {
return this.kmip.request(logger, 'Decrypt', [
KMIP.TextString('Unique Identifier', masterKeyId),
KMIP.Structure('Cryptographic Parameters', [
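For context, a hedged sketch of how this high-level Client is typically exercised end to end; the option values and the logger are illustrative assumptions (the constructor JSDoc above documents what each field means):

    const werelogs = require('werelogs');
    const logger = new werelogs.Logger('KmipExample');
    const client = new Client({
        kmip: {
            client: { compoundCreateActivate: false }, // best server compatibility
            codec: {},                                 // TTLVCodec options, assumed empty
            transport: { tls: { port: 5696 } },        // standard KMIP port
        },
    });

    client.createBucketKey('example-bucket', logger, (err, bucketKeyId) => {
        if (err) {
            return logger.error('key creation failed', { error: err });
        }
        // bucketKeyId can now be passed to cipherDataKey()/decipherDataKey(),
        // and eventually to destroyBucketKey() when the bucket is deleted.
        return logger.info('created key', { bucketKeyId });
    });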
@@ -1,10 +1,11 @@
import assert from 'assert';
function _lookup(decodedTTLV: any[], path: string) {
function _lookup(decodedTTLV, path) {
const xpath = path.split('/').filter(word => word.length > 0);
const canonicalPath = xpath.join('/');
const obj = decodedTTLV;
let res: any[] = [];
let res = [];
assert(Array.isArray(obj));
for (let current = xpath.shift(); current; current = xpath.shift()) {
for (let i = 0; i < obj.length; ++i) {
@@ -29,23 +30,21 @@ function _lookup(decodedTTLV: any[], path: string) {
}
export default class Message {
content: any[];
/**
* Construct a new abstract Message
* @param content - the content of the message
* @param {Object} content - the content of the message
*/
constructor(content: any[]) {
constructor(content) {
this.content = content;
}
/**
* Lookup the values corresponding to the provided path
* @param path - the path in the hierarchy of the values
* @param {String} path - the path in the hierarchy of the values
* of interest
* @return - an array of the values matching the provided path
* @return {Object} - an array of the values matching the provided path
*/
lookup(path: string) {
lookup(path) {
return _lookup(this.content, path);
}
}
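To make the lookup path syntax concrete, here is a hand-written stand-in for a decoded TTLV message; in practice the content comes from the codec, not from literals like these, and the result shown is the expected behaviour, not a captured output:

    const msg = new Message([
        { 'Response Message': { type: 'Structure', value: [
            { 'Batch Item': { type: 'Structure', value: [
                { 'Unique Identifier': { type: 'TextString', value: 'key-1' } },
            ] } },
        ] } },
    ]);

    // Walks the Structure values level by level and collects the leaves:
    msg.lookup('Response Message/Batch Item/Unique Identifier');
    // expected to yield ['key-1']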
@@ -1,11 +1,9 @@
/* eslint dot-notation: "off" */
import KMIPTags from '../tags.json';
import KMIPMessage from '../Message';
import * as werelogs from 'werelogs';
const UINT32_MAX = Math.pow(2, 32);
function _ttlvPadVector(vec: any[]) {
function _ttlvPadVector(vec) {
let length = 0;
vec.forEach(buf => {
if (!(buf instanceof Buffer)) {
@@ -20,14 +18,13 @@ function _ttlvPadVector(vec: any[]) {
return vec;
}
function _throwError(logger: werelogs.Logger, msg: string, data?: LogDictionary) {
function _throwError(logger, msg, data) {
logger.error(msg, data);
throw Error(msg);
}
export default function TTLVCodec() {
if (!new.target) {
// @ts-ignore
return new TTLVCodec();
}
@@ -44,7 +41,7 @@ export default function TTLVCodec() {
const funcName = 'Structure::decode';
const length = value.length;
let i = 0;
const result: any[] = [];
const result = [];
let diversion = null;
while (i < length) {
const element = {};
@@ -52,7 +49,7 @@ export default function TTLVCodec() {
const elementType =
value.slice(i + 3, i + 4).toString('hex');
const elementLength = value.readUInt32BE(i + 4);
const property: any = {};
const property = {};
if (!TypeDecoder[elementType]) {
_throwError(logger,
'Unknown element type',
@@ -99,7 +96,7 @@ export default function TTLVCodec() {
const type = Buffer.from(TypeEncoder['Structure'].value, 'hex');
const length = Buffer.alloc(4);
let vectorLength = 0;
let encodedValue: any[] = [];
let encodedValue = [];
value.forEach(item => {
Object.keys(item).forEach(key => {
const itemTagName = key;
@@ -112,7 +109,7 @@ export default function TTLVCodec() {
if (!TypeEncoder[itemType]) {
throw Error(`Unknown Type '${itemType}'`);
}
const itemResult: any[] =
const itemResult =
TypeEncoder[itemType].encode(itemTagName,
itemValue,
itemDiversion);
@@ -344,7 +341,7 @@ export default function TTLVCodec() {
/* Construct TagDecoder */
Object.keys(TagDecoder).forEach(key => {
const element: any = {};
const element = {};
element.value = key;
if (TagDecoder[key]['enumeration']) {
const enumeration = {};
@@ -369,7 +366,7 @@ export default function TTLVCodec() {
/* Public Methods Definition */
// @ts-ignore
this.encodeMask = (tagName, value) => {
let mask = 0;
value.forEach(item => {
@@ -382,10 +379,9 @@ export default function TTLVCodec() {
return mask;
};
// @ts-ignore
this.decodeMask = (tagName, givenMask) => {
let mask = givenMask;
const value: any[] = [];
const value = [];
const tag = TagEncoder[tagName].value;
Object.keys(TagDecoder[tag].enumeration).forEach(key => {
const bit = Buffer.from(key, 'hex').readUInt32BE(0);
@@ -397,17 +393,15 @@ export default function TTLVCodec() {
return value;
};
// @ts-ignore
this.decode = (logger, rawMessage) => {
const messageContent =
TypeDecoder['01'].decode(logger, null, rawMessage);
return new KMIPMessage(messageContent);
};
// @ts-ignore
this.encode = message => {
const value = message.content;
let result: any[] = [];
let result = [];
value.forEach(item => {
Object.keys(item).forEach(key => {
if (!TagEncoder[key]) {
@@ -426,13 +420,10 @@ export default function TTLVCodec() {
return Buffer.concat(_ttlvPadVector(result));
};
// @ts-ignore
this.mapExtension = (tagName, tagValue) => {
const tagValueStr = tagValue.toString(16);
TagDecoder[tagValueStr] = { name: tagName };
TagEncoder[tagName] = { value: tagValueStr };
};
// @ts-ignore
return this;
}
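A short hedged sketch of the codec's bitmask helpers; the tag and bit names come from the standard KMIP nomenclature that tags.json encodes, and the round trip shown is the expected behaviour rather than a captured output:

    const codec = TTLVCodec(); // the new.target guard makes `new` optional

    const mask = codec.encodeMask('Cryptographic Usage Mask',
        ['Encrypt', 'Decrypt']);  // ORs the named bits together
    const names = codec.decodeMask('Cryptographic Usage Mask', mask);
    // names should contain 'Encrypt' and 'Decrypt' again; the order
    // follows the tag enumeration, not the input array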
@@ -1,13 +1,6 @@
import uuid from 'uuid/v4';
import Message from './Message';
import * as werelogs from 'werelogs';
import uuidv4 from 'uuid/v4';
type UUIDOptions = { random?: number[]; rng?: () => number[]; } | null;
function uuidv4(options: UUIDOptions, buffer: Buffer, offset?: number): Buffer;
function uuidv4(options?: UUIDOptions): string;
function uuidv4(options?: any, buffer?: any, offset?: any) {
return uuid(options, buffer, offset);
}
import Message from './Message';
/* This client requires at least a KMIP 1.2 compatible server */
const DEFAULT_PROTOCOL_VERSION_MAJOR = 1;
@@ -22,28 +15,19 @@ function _uniqueBatchItemID() {
}
function _PrimitiveType(tagName: string, type: string, value: any) {
function _PrimitiveType(tagName, type, value) {
return { [tagName]: { type, value } };
}
export type Options = { codec: any; transport: any; };
export default class KMIP {
protocolVersion: {
major: number;
minor: number;
};
maximumResponseSize: number;
options: Options;
codec: any;
transport: any;
/**
* Construct a new KMIP Object
* @param {Class} Codec -
* @param {Class} Transport -
* @param {Object} options -
* @param {Function} cb -
*/
constructor(Codec: any, Transport: any, options: { kmip: Options }) {
constructor(Codec, Transport, options) {
this.protocolVersion = {
major: DEFAULT_PROTOCOL_VERSION_MAJOR,
minor: DEFAULT_PROTOCOL_VERSION_MINOR,
@@ -58,51 +42,51 @@ export default class KMIP {
/**
* create a new abstract message instance
* @param content - Most likely a call to KMIP.Structure
* @param {Object} content - Most likely a call to KMIP.Structure
* with 'Request Message' as tagName
* @returns an instance of Message
* @returns {Object} an instance of Message
*/
static Message(content: any) {
static Message(content) {
return new Message(content);
}
/**
* Create a KMIP Structure field instance
* @param tagName - Name of the KMIP field
* @param value - array of KMIP fields
* @returns an abstract KMIP field
* @param {String} tagName - Name of the KMIP field
* @param {Array} value - array of KMIP fields
* @returns {Object} an abstract KMIP field
*/
static Structure(tagName: string, value: any[]) {
static Structure(tagName, value) {
return _PrimitiveType(tagName, 'Structure', value);
}
/**
* Create a KMIP Integer field instance
* @param tagName - Name of the KMIP field
* @param value - a number
* @returns an abstract KMIP field
* @param {String} tagName - Name of the KMIP field
* @param {Number} value - a number
* @returns {Object} an abstract KMIP field
*/
static Integer(tagName: string, value: number) {
static Integer(tagName, value) {
return _PrimitiveType(tagName, 'Integer', value);
}
/**
* Create a KMIP Long Integer field instance
* @param tagName - Name of the KMIP field
* @param value - a number (beware of the 53-bit limitation)
* @returns an abstract KMIP field
* @param {String} tagName - Name of the KMIP field
* @param {Number} value - a number (beware of the 53-bit limitation)
* @returns {Object} an abstract KMIP field
*/
static LongInteger(tagName: string, value: number) {
static LongInteger(tagName, value) {
return _PrimitiveType(tagName, 'LongInteger', value);
}
/**
* Create a KMIP Big Integer field instance
* @param tagName - Name of the KMIP field
* @param value - buffer containing the big integer
* @returns an abstract KMIP field
* @param {String} tagName - Name of the KMIP field
* @param {Buffer} value - buffer containing the big integer
* @returns {Object} an abstract KMIP field
*/
static BigInteger(tagName: string, value: Buffer) {
static BigInteger(tagName, value) {
if (value.length % 8 !== 0) {
throw Error('Big Integer value length must be a multiple of 8');
}
@@ -111,74 +95,74 @@ export default class KMIP {
/**
* Create a KMIP Enumeration field instance
* @param tagName - Name of the KMIP Enumeration
* @param value - Name of the KMIP Enumeration value
* @returns an abstract KMIP field
* @param {String} tagName - Name of the KMIP Enumeration
* @param {String} value - Name of the KMIP Enumeration value
* @returns {Object} an abstract KMIP field
*/
static Enumeration(tagName: string, value: string) {
static Enumeration(tagName, value) {
return _PrimitiveType(tagName, 'Enumeration', value);
}
/**
* Create a KMIP Boolean field instance
* @param tagName - Name of the KMIP field
* @param value - anything falsey or not (converted to a Boolean)
* @returns an abstract KMIP field
* @param {String} tagName - Name of the KMIP field
* @param {Boolean} value - anything falsey or not (converted to a Boolean)
* @returns {Object} an abstract KMIP field
*/
static Boolean(tagName: string, value: boolean) {
static Boolean(tagName, value) {
return _PrimitiveType(tagName, 'Boolean', !!value);
}
/**
* Create a KMIP Text String field instance
* @param tagName - Name of the KMIP field
* @param value - the text string
* @returns an abstract KMIP field
* @param {String} tagName - Name of the KMIP field
* @param {String} value - the text string
* @returns {Object} an abstract KMIP field
*/
static TextString(tagName: string, value: string) {
static TextString(tagName, value) {
return _PrimitiveType(tagName, 'TextString', value);
}
/**
* Create a KMIP Byte String field instance
* @param tagName - Name of the KMIP field
* @param value - buffer containing the byte string
* @returns an abstract KMIP field
* @param {String} tagName - Name of the KMIP field
* @param {Buffer} value - buffer containing the byte string
* @returns {Object} an abstract KMIP field
*/
static ByteString(tagName: string, value: Buffer) {
static ByteString(tagName, value) {
return _PrimitiveType(tagName, 'ByteString', value);
}
/**
* Create a KMIP Date-Time field instance
* @param tagName - Name of the KMIP field
* @param value - instance of a Date (ms are discarded)
* @returns an abstract KMIP field
* @param {String} tagName - Name of the KMIP field
* @param {Date} value - instance of a Date (ms are discarded)
* @returns {Object} an abstract KMIP field
*/
static DateTime(tagName: string, value: Date) {
static DateTime(tagName, value) {
value.setMilliseconds(0);
return _PrimitiveType(tagName, 'Date-Time', value);
}
/**
* Create a KMIP Interval field instance
* @param tagName - Name of the KMIP field
* @param value - number of seconds of the interval
* @returns an abstract KMIP field
* @param {String} tagName - Name of the KMIP field
* @param {Integer} value - number of seconds of the interval
* @returns {Object} an abstract KMIP field
*/
static Interval(tagName: string, value: number) {
static Interval(tagName, value) {
return _PrimitiveType(tagName, 'Interval', value);
}
/**
* Create a KMIP Attribute field instance
* @param type - type of the attribute value
* @param name - Name of the attribute or KMIP field
* @param value - value of the field suitable for the
* @param {String} type - type of the attribute value
* @param {String} name - Name of the attribute or KMIP field
* @param {Object} value - value of the field suitable for the
* specified type
* @returns an abstract KMIP field
* @returns {Object} an abstract KMIP field
*/
static Attribute(type: string, name: string, value: any) {
static Attribute(type, name, value) {
if (type === 'Date-Time') {
value.setMilliseconds(0);
}
@@ -210,72 +194,69 @@ export default class KMIP {
* Register a higher level handshake function to be called
* after the connection is initialized and before the first
* message is sent.
* @param handshakeFunction - (logger: Object, cb: Function(err))
* @param {Function} handshakeFunction - (logger: Object, cb: Function(err))
* @returns {undefined}
*/
registerHandshakeFunction(
handshakeFunction: (
logger: werelogs.Logger,
cb: (error: Error | null) => void,
) => void,
) {
registerHandshakeFunction(handshakeFunction) {
this.transport.registerHandshakeFunction(handshakeFunction);
}
/**
* Decode a raw message, usually received from the transport layer
* @param logger - a Logger instance
* @param rawMessage - the message to decode
* @returns the decoded message as an instance of KMIP.Message
* @param {Object} logger - a Logger instance
* @param {Buffer} rawMessage - the message to decode
* @returns {Object} the decoded message as an instance of KMIP.Message
*/
_decodeMessage(logger: werelogs.Logger, rawMessage: Buffer) {
_decodeMessage(logger, rawMessage) {
return this.codec.decode(logger, rawMessage);
}
/**
* Encode a message
* @param message - Instance of a KMIP.Message
* @returns the encoded message suitable for the transport layer
* @param {Object} message - Instance of a KMIP.Message
* @returns {Buffer} the encoded message suitable for the transport layer
*/
_encodeMessage(message: any) {
_encodeMessage(message) {
return this.codec.encode(message);
}
/**
* Decode a bitmask
* @param tagName - name of the bit mask defining tag
* @param givenMask - bit mask to decode
* @returns array of named bits set in the given bit mask
* @param {string} tagName - name of the bit mask defining tag
* @param {Integer} givenMask - bit mask to decode
* @returns {Array} array of named bits set in the given bit mask
*/
decodeMask(tagName: string, givenMask: number) {
decodeMask(tagName, givenMask) {
return this.codec.decodeMask(tagName, givenMask);
}
/**
* Encode a bitmask
* @param tagName - name of the bit mask defining tag
* @param value - array of named bits to set in the mask
* @returns Integer encoded bitmask
* @param {String} tagName - name of the bit mask defining tag
* @param {Array} value - array of named bits to set in the mask
* @returns {Integer} Integer encoded bitmask
*/
encodeMask(tagName: string, value: any[]): number {
encodeMask(tagName, value) {
return this.codec.encodeMask(tagName, value);
}
/**
* Amend the tag nomenclature with a vendor specific extension
* @param extensionName - Name of the tag to record
* @param extensionTag - Tag value represented as an integer
* @param {String} extensionName - Name of the tag to record
* @param {Integer} extensionTag - Tag value represented as an integer
* @returns {undefined}
*/
mapExtension(extensionName: string, extensionTag: number) {
mapExtension(extensionName, extensionTag) {
return this.codec.mapExtension(extensionName, extensionTag);
}
changeProtocolVersion(major: number, minor: number) {
changeProtocolVersion(major, minor) {
this.protocolVersion = { major, minor };
}
/**
* Send an operation request message to the KMIP Server
* @param logger - Werelog logger object
* @param {Object} logger - Werelog logger object
* @param {String} operation - The name of the operation as defined in
* the KMIP protocol specification.
* @param {Object} payload - payload of the operation request. Specifically
@@ -284,7 +265,7 @@ export default class KMIP {
* @param {Function} cb - The callback(error: Object, response: Object)
* @returns {undefined}
*/
request(logger: werelogs.Logger, operation: string, payload: any, cb: (error: Error | null, response?: any) => void) {
request(logger, operation, payload, cb) {
const uuid = _uniqueBatchItemID();
const message = KMIP.Message([
KMIP.Structure('Request Message', [
@@ -333,8 +314,6 @@ export default class KMIP {
if (performedOperation !== operation) {
this.transport.abortPipeline(conversation);
const error = Error('Operation mismatch',
// TODO
// @ts-ignore
{ got: performedOperation,
expected: operation });
logger.error('KMIP::request: Operation mismatch',
@@ -349,8 +328,6 @@ export default class KMIP {
response.lookup(
'Response Message/Batch Item/Result Message')[0];
const error = Error('KMIP request failure',
// TODO
// @ts-ignore
{ resultStatus,
resultReason,
resultMessage });
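The field constructors are designed to nest, mirroring the KMIP message tree. As a hedged example, this is roughly the payload shape that request() wraps into a 'Request Message'; it mirrors the 'Discover Versions' call made from Client.ts above, but the minor version value and the lookup path are assumptions for illustration:

    // `kmip` is an instance of this class, constructed elsewhere
    kmip.request(logger, 'Discover Versions', [
        KMIP.Structure('Protocol Version', [
            KMIP.Integer('Protocol Version Major', 1),
            KMIP.Integer('Protocol Version Minor', 4), // hypothetical value
        ]),
    ], (err, response) => {
        if (err) {
            return logger.error('discover versions failed', { error: err });
        }
        return logger.info('server answered', {
            payload: response.lookup(
                'Response Message/Batch Item/Response Payload'),
        });
    });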
@@ -1,40 +1,19 @@
import assert from 'assert';
import tls from 'tls';
import * as werelogs from 'werelogs';
const DEFAULT_PIPELINE_DEPTH = 8;
const DEFAULT_KMIP_PORT = 5696;
export type Options = {
pipelineDepth: number;
tls: {
port: number;
};
}
export default class TransportTemplate {
channel: typeof tls;
options: Options;
pipelineDepth: number;
callbackPipeline: ((error: Error | null, socket?: any, data?: any) => void)[];
deferedRequests: Array<{
encodedMessage: Buffer;
cb: ((error: Error | null, data?: any) => void)
}>;
pipelineDrainedCallback: any | null;
handshakeFunction: any | null;
socket: any;
/**
* Construct a new object of the TransportTemplate class
* @param channel - Typically the tls object
* @param options - Instance options
* @param options.pipelineDepth - depth of the pipeline
* @param options.tls - Standard TLS socket initialization
* @param {Object} channel - Typically the tls object
* @param {Object} options - Instance options
* @param {Number} options.pipelineDepth - depth of the pipeline
* @param {Object} options.tls - Standard TLS socket initialization
* parameters
* @param options.tls.port - TLS server port to connect to
* @param {Number} options.tls.port - TLS server port to connect to
*/
constructor(channel: typeof tls, options: Options) {
constructor(channel, options) {
this.channel = channel;
this.options = options;
this.pipelineDepth = Math.max(1, options.pipelineDepth ||
@@ -49,9 +28,10 @@ export default class TransportTemplate {
/**
* Drain the outstanding and deferred request queues by
* calling the associated callback with an error
* @param error - the error to call the callback function with.
* @param {Error} error - the error to call the callback function with.
* @returns {undefined}
*/
_drainQueuesWithError(error: Error) {
_drainQueuesWithError(error) {
this.callbackPipeline.forEach(queuedCallback => {
queuedCallback(error);
});
@@ -66,29 +46,23 @@ export default class TransportTemplate {
* Register a higher level handshake function to be called
* after the connection is initialized and before the first
* message is sent.
* @param handshakeFunction - (logger: Object, cb: Function(err))
* @param {Function} handshakeFunction - (logger: Object, cb: Function(err))
* @returns {undefined}
*/
registerHandshakeFunction(
handshakeFunction: (
logger: werelogs.Logger,
cb: (error: Error | null) => void,
) => void,
) {
registerHandshakeFunction(handshakeFunction) {
this.handshakeFunction = handshakeFunction;
}
/**
* Create a new conversation (e.g. a socket) between the client
* and the server.
* @param logger - Werelogs logger object
* @param readyCallback - callback function to call when the
* @param {Object} logger - Werelogs logger object
* @param {Function} readyCallback - callback function to call when the
* conversation is ready to be initiated
* func(err: Error)
* @returns {undefined}
*/
_createConversation(
logger: werelogs.Logger,
readyCallback: (error: Error | null) => void,
) {
_createConversation(logger, readyCallback) {
try {
const socket = this.channel.connect(
this.options.tls.port || DEFAULT_KMIP_PORT,
@@ -102,18 +76,16 @@ export default class TransportTemplate {
});
socket.on('data', data => {
const queuedCallback = this.callbackPipeline.shift();
queuedCallback?.(null, socket, data);
queuedCallback(null, socket, data);
if (this.callbackPipeline.length <
this.pipelineDepth &&
this.deferedRequests.length > 0) {
const deferedRequest = this.deferedRequests.shift();
process.nextTick(() => {
if (deferedRequest) {
this.send(logger,
deferedRequest.encodedMessage,
deferedRequest.cb);
}
});
} else if (this.callbackPipeline.length === 0 &&
this.deferedRequests.length === 0 &&
@@ -131,18 +103,14 @@ export default class TransportTemplate {
this._drainQueuesWithError(err);
});
this.socket = socket;
} catch (err: any) {
} catch (err) {
logger.error(err);
this._drainQueuesWithError(err);
readyCallback(err);
}
}
_doSend(
logger: werelogs.Logger,
encodedMessage: Buffer,
cb: (error: Error | null, socket?: any, data?: any) => void,
) {
_doSend(logger, encodedMessage, cb) {
this.callbackPipeline.push(cb);
if (this.socket === null || this.socket.destroyed) {
this._createConversation(logger, () => {});
@@ -158,16 +126,13 @@ export default class TransportTemplate {
/**
* Send an encoded message to the server
* @param logger - Werelogs logger object
* @param encodedMessage - the encoded message to send to the
* @param {Object} logger - Werelogs logger object
* @param {Buffer} encodedMessage - the encoded message to send to the
* server
* @param cb - (err, conversation, rawResponse)
* @param {Function} cb - (err, conversation, rawResponse)
* @returns {undefined}
*/
send(
logger: werelogs.Logger,
encodedMessage: Buffer,
cb: (error: Error | null, conversation?: any, rawResponse?: any) => void,
) {
send(logger, encodedMessage, cb) {
if (this.callbackPipeline.length >= this.pipelineDepth) {
return this.deferedRequests.push({ encodedMessage, cb });
}
@@ -179,6 +144,7 @@ export default class TransportTemplate {
* Gracefully interrupt the conversation. If the caller keeps sending
* message after calling this function, the conversation won't
* converge to its end.
* @returns {undefined}
*/
end() {
if (!this.socket) {
@@ -195,9 +161,10 @@ export default class TransportTemplate {
/**
* Abruptly interrupt the conversation and cancel the outstanding and
* deferred requests
* @param conversation - the conversation to abort
* @param {Object} conversation - the conversation to abort
* @returns {undefined}
*/
abortPipeline(conversation: any) {
abortPipeline(conversation) {
conversation.end();
}
}
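The pipelining logic above keeps at most pipelineDepth requests in flight and parks any overflow in the deferedRequests queue until a response frees a slot. A minimal hedged sketch of that effect; the host, the logger, and the message buffers are placeholders, not values from the diff:

    const tls = require('tls');

    const transport = new TransportTemplate(tls, {
        pipelineDepth: 2, // at most two requests on the wire at a time
        tls: { port: 5696, host: 'kms.example.com' }, // passed to tls.connect()
    });

    // The first two sends go out immediately; the third is deferred and is
    // replayed by the 'data' handler once a response frees a pipeline slot.
    // msg1..msg3 stand for Buffers produced by the codec.
    [msg1, msg2, msg3].forEach(encodedMessage => {
        transport.send(logger, encodedMessage, (err, conversation, rawResponse) => {
            // rawResponse is the raw TTLV reply for this request
        });
    });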
@@ -1,8 +1,8 @@
import tls from 'tls';
import TransportTemplate, { Options } from './TransportTemplate';
import * as tls from 'tls';
import TransportTemplate from './TransportTemplate';
export default class TlsTransport extends TransportTemplate {
constructor(options: Options) {
constructor(options) {
super(tls, options);
}
}