Compare commits

..

6 Commits

Author           SHA1         Message                                                     Date
williamlardier   47a7253706   wip2                                                        2023-04-06 14:29:46 +02:00
williamlardier   971c8dbc0a   wip                                                         2023-04-06 14:11:21 +02:00
williamlardier   1d243f831a   ARSN-329: bump to 8.1.93                                    2023-04-06 12:24:12 +02:00
williamlardier   9949c62f73   ARSN-329: update tests                                      2023-04-06 12:24:11 +02:00
williamlardier   3fc3e095d9   ARSN-329: switch to promises as callbacks are deprecated   2023-04-06 09:08:52 +02:00
williamlardier   6183b3a8b9   ARSN-329: bump mongodb driver                               2023-04-06 08:59:55 +02:00
125 changed files with 11567 additions and 14849 deletions

View File

@@ -1,6 +1 @@
-{
-    "extends": "scality",
-    "parserOptions": {
-        "ecmaVersion": 2020
-    }
-}
+{ "extends": "scality" }

View File

@@ -1,25 +0,0 @@
---
name: codeQL

on:
  push:
    branches: [development/*, stabilization/*, hotfix/*]
  pull_request:
    branches: [development/*, stabilization/*, hotfix/*]
  workflow_dispatch:

jobs:
  analyze:
    name: Static analysis with CodeQL
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: javascript, typescript
      - name: Build and analyze
        uses: github/codeql-action/analyze@v3

View File

@@ -1,16 +0,0 @@
---
name: dependency review

on:
  pull_request:
    branches: [development/*, stabilization/*, hotfix/*]

jobs:
  dependency-review:
    runs-on: ubuntu-latest
    steps:
      - name: 'Checkout Repository'
        uses: actions/checkout@v4
      - name: 'Dependency Review'
        uses: actions/dependency-review-action@v4

View File

@@ -25,8 +25,8 @@ jobs:
         - 6379:6379
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+        uses: actions/checkout@v2
+      - uses: actions/setup-node@v2
         with:
           node-version: '16'
           cache: 'yarn'
@@ -46,9 +46,7 @@ jobs:
         run: yarn --silent coverage
       - name: run functional tests
         run: yarn ft_test
-      - uses: codecov/codecov-action@v4
-        with:
-          token: ${{ secrets.CODECOV_TOKEN }}
+      - uses: codecov/codecov-action@v2
       - name: run executables tests
         run: yarn install && yarn test
         working-directory: 'lib/executables/pensieveCreds/'
@@ -59,9 +57,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
       - name: Install NodeJS
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v2
         with:
           node-version: '16'
           cache: yarn
@@ -72,7 +70,7 @@ jobs:
         run: yarn build
         continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
       - name: Upload artifacts
-        uses: scality/action-artifacts@v4
+        uses: scality/action-artifacts@v2
         with:
           url: https://artifacts.scality.net
           user: ${{ secrets.ARTIFACTS_USER }}

.swcrc
View File

@@ -1,12 +0,0 @@
{
    "$schema": "https://swc.rs/schema.json",
    "jsc": {
        "parser": {
            "syntax": "typescript"
        },
        "target": "es2017"
    },
    "module": {
        "type": "commonjs"
    }
}

View File

@@ -246,15 +246,3 @@ For capacity-enabled buckets, contains the following data:
 ### Usage
 
 Used to store bucket tagging
-
-## Model version 17
-
-### Properties Added
-
-```javascript
-this._quotaMax = quotaMax || 0;
-```
-
-### Usage
-
-Used to store bucket quota

View File

@@ -1,27 +0,0 @@
# Delimiter
The Delimiter class handles raw listings from the database with an
optional delimiter, and fills in a curated listing with "Contents" and
"CommonPrefixes" as a result.
## Expected Behavior
- only lists keys belonging to the given **prefix** (if provided)
- groups listed keys that have a common prefix ending with a delimiter
inside CommonPrefixes
- can take a **marker** or **continuationToken** to list from a specific key
- can take a **maxKeys** parameter to limit how many keys can be returned
## State Chart
- States with grey background are *Idle* states, which are waiting for
a new listing key
- States with blue background are *Processing* states, which are
actively processing a new listing key passed by the filter()
function
![Delimiter State Chart](./pics/delimiterStateChart.svg)
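
As a minimal, hypothetical sketch of how a caller could drive such a filter (the `FILTER_*` codes, entry shape, and method names are illustrative assumptions, not the exact Arsenal API):

```typescript
// Hypothetical driver loop for a Delimiter-style listing filter.
const FILTER_END = 0;
const FILTER_ACCEPT = 1;
const FILTER_SKIP = 2;

interface ListingEntry {
    key: string;
    value: string;
}

interface ListingFilter {
    // returns one of the FILTER_* codes for each raw database entry
    filter(entry: ListingEntry): number;
    result(): { Contents: ListingEntry[]; CommonPrefixes: string[] };
}

function listKeys(entries: ListingEntry[], f: ListingFilter) {
    for (const entry of entries) {
        switch (f.filter(entry)) {
        case FILTER_END:
            // maxKeys reached: stop the listing
            return f.result();
        case FILTER_SKIP:
            // a real caller would fast-forward the database iterator
            // past the whole common prefix instead of stepping key by key
            continue;
        case FILTER_ACCEPT:
        default:
            continue;
        }
    }
    return f.result();
}
```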

View File

@@ -1,45 +0,0 @@
# DelimiterMaster
The DelimiterMaster class handles raw listings from the database of a
versioned or non-versioned bucket with an optional delimiter, and
fills in a curated listing with "Contents" and "CommonPrefixes" as a
result.
## Expected Behavior
- only lists latest versions of versioned buckets
- only lists keys belonging to the given **prefix** (if provided)
- does not list latest versions that are delete markers
- groups listed keys that have a common prefix ending with a delimiter
inside CommonPrefixes
- can take a **marker** or **continuationToken** to list from a specific key
- can take a **maxKeys** parameter to limit how many keys can be returned
- reconciles internal PHD keys with the next version (PHD keys are
created when the specific version that was the latest version is
deleted)
- skips internal keys like replay keys
## State Chart
- States with grey background are *Idle* states, which are waiting for
a new listing key
- States with blue background are *Processing* states, which are
actively processing a new listing key passed by the filter()
function
### Bucket Vformat=v0
![DelimiterMaster State Chart for v0 format](./pics/delimiterMasterV0StateChart.svg)
### Bucket Vformat=v1
For buckets in versioning key format **v1**, the algorithm used is the
one from [Delimiter](delimiter.md).
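
As a rough illustration of the v0 master-key rules above, a predicate like the following captures "skip version keys, skip delete markers, skip replay keys"; it assumes the common v0 layout where a version key is `<objectKey>\0<versionId>`, and the replay prefix name is hypothetical:

```typescript
// Simplified illustration, not the actual Arsenal implementation.
const VID_SEP = '\0'; // assumed v0 separator between object key and version id

function isVersionKey(key: string): boolean {
    return key.indexOf(VID_SEP) >= 0;
}

// A key is listable as a latest version only if it is a master key,
// its value is not a delete marker, and it is not an internal replay key.
function isListableMasterKey(
    key: string,
    value: { isDeleteMarker?: boolean },
    replayPrefix = 'replay/', // hypothetical prefix for illustration
): boolean {
    return !isVersionKey(key)
        && !value.isDeleteMarker
        && !key.startsWith(replayPrefix);
}
```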

View File

@@ -1,45 +0,0 @@
digraph {
node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
edge [fontsize=14];
rankdir=TB;
START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]
node [fillcolor="lightgrey"];
"NotSkippingPrefixNorVersions.Idle" [label="NotSkippingPrefixNorVersions",group="NotSkippingPrefixNorVersions",width=4];
"SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];
"SkippingVersions.Idle" [label="SkippingVersions",group="SkippingVersions"];
"WaitVersionAfterPHD.Idle" [label="WaitVersionAfterPHD",group="WaitVersionAfterPHD"];
node [fillcolor="lightblue"];
"NotSkippingPrefixNorVersions.Processing" [label="NotSkippingPrefixNorVersions",group="NotSkippingPrefixNorVersions",width=4];
"SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];
"SkippingVersions.Processing" [label="SkippingVersions",group="SkippingVersions"];
"WaitVersionAfterPHD.Processing" [label="WaitVersionAfterPHD",group="WaitVersionAfterPHD"];
START -> "SkippingVersions.Idle" [label="[marker != undefined]"]
START -> "NotSkippingPrefixNorVersions.Idle" [label="[marker == undefined]"]
"NotSkippingPrefixNorVersions.Idle" -> "NotSkippingPrefixNorVersions.Processing" [label="filter(key, value)"]
"SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]
"SkippingVersions.Idle" -> "SkippingVersions.Processing" [label="filter(key, value)"]
"WaitVersionAfterPHD.Idle" -> "WaitVersionAfterPHD.Processing" [label="filter(key, value)"]
"NotSkippingPrefixNorVersions.Processing" -> "SkippingVersions.Idle" [label="[Version.isDeleteMarker(value)]\n-> FILTER_ACCEPT"]
"NotSkippingPrefixNorVersions.Processing" -> "WaitVersionAfterPHD.Idle" [label="[Version.isPHD(value)]\n-> FILTER_ACCEPT"]
"NotSkippingPrefixNorVersions.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(<ReplayPrefix>)]\n/ prefix <- <ReplayPrefix>\n-> FILTER_SKIP"]
"NotSkippingPrefixNorVersions.Processing" -> END [label="[isListableKey(key, value) and\nKeys == maxKeys]\n-> FILTER_END"]
"NotSkippingPrefixNorVersions.Processing" -> "SkippingPrefix.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nhasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
"NotSkippingPrefixNorVersions.Processing" -> "SkippingVersions.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nnot hasDelimiter(key)]\n/ Contents.append(key, value)\n-> FILTER_ACCEPT"]
"SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
"SkippingPrefix.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not key.startsWith(prefix)]"]
"SkippingVersions.Processing" -> "SkippingVersions.Idle" [label="[isVersionKey(key)]\n-> FILTER_SKIP"]
"SkippingVersions.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not isVersionKey(key)]"]
"WaitVersionAfterPHD.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[isVersionKey(key) and master(key) == PHDkey]\n/ key <- master(key)"]
"WaitVersionAfterPHD.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not isVersionKey(key) or master(key) != PHDkey]"]
}

View File

@@ -1,216 +0,0 @@
[deleted: Graphviz-generated SVG rendering of the DelimiterMaster v0 state chart (2313 x 460 pt, 18 KiB), produced by graphviz 2.43.0 from the .dot source above]

View File

@@ -1,35 +0,0 @@
digraph {
node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
edge [fontsize=14];
rankdir=TB;
START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]
node [fillcolor="lightgrey"];
"NotSkipping.Idle" [label="NotSkipping",group="NotSkipping"];
"NeverSkipping.Idle" [label="NeverSkipping",group="NeverSkipping"];
"NotSkippingPrefix.Idle" [label="NotSkippingPrefix",group="NotSkippingPrefix"];
"SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];
node [fillcolor="lightblue"];
"NeverSkipping.Processing" [label="NeverSkipping",group="NeverSkipping"];
"NotSkippingPrefix.Processing" [label="NotSkippingPrefix",group="NotSkippingPrefix"];
"SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];
START -> "NotSkipping.Idle"
"NotSkipping.Idle" -> "NeverSkipping.Idle" [label="[delimiter == undefined]"]
"NotSkipping.Idle" -> "NotSkippingPrefix.Idle" [label="[delimiter == '/']"]
"NeverSkipping.Idle" -> "NeverSkipping.Processing" [label="filter(key, value)"]
"NotSkippingPrefix.Idle" -> "NotSkippingPrefix.Processing" [label="filter(key, value)"]
"SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]
"NeverSkipping.Processing" -> END [label="[nKeys == maxKeys]\n-> FILTER_END"]
"NeverSkipping.Processing" -> "NeverSkipping.Idle" [label="[nKeys < maxKeys]\n/ Contents.append(key, value)\n -> FILTER_ACCEPT"]
"NotSkippingPrefix.Processing" -> END [label="[nKeys == maxKeys]\n -> FILTER_END"]
"NotSkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[nKeys < maxKeys and hasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
"NotSkippingPrefix.Processing" -> "NotSkippingPrefix.Idle" [label="[nKeys < maxKeys and not hasDelimiter(key)]\n/ Contents.append(key, value)\n -> FILTER_ACCEPT"]
"SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
"SkippingPrefix.Processing" -> "NotSkippingPrefix.Processing" [label="[not key.startsWith(prefix)]"]
}

View File

@@ -1,166 +0,0 @@
[deleted: Graphviz-generated SVG rendering of the Delimiter state chart (975 x 533 pt, 12 KiB), produced by graphviz 2.43.0 from the .dot source above]

View File

@@ -1,9 +1,6 @@
 import * as evaluators from './lib/policyEvaluator/evaluator';
 import evaluatePrincipal from './lib/policyEvaluator/principal';
-import RequestContext, {
-    actionNeedQuotaCheck,
-    actionNeedQuotaCheckCopy,
-    actionWithDataDeletion } from './lib/policyEvaluator/RequestContext';
+import RequestContext from './lib/policyEvaluator/RequestContext';
 import * as requestUtils from './lib/policyEvaluator/requestUtils';
 import * as actionMaps from './lib/policyEvaluator/utils/actionMaps';
 import { validateUserPolicy } from './lib/policy/policyValidator'
@@ -28,7 +25,6 @@ import * as objectRestore from './lib/s3middleware/objectRestore';
 import * as lifecycleHelpers from './lib/s3middleware/lifecycleHelpers';
 export { default as errors } from './lib/errors';
 export { default as Clustering } from './lib/Clustering';
-export * as ClusterRPC from './lib/clustering/ClusterRPC';
 export * as ipCheck from './lib/ipCheck';
 export * as auth from './lib/auth/auth';
 export * as constants from './lib/constants';
@@ -52,8 +48,6 @@ export const algorithms = {
         Skip: require('./lib/algos/list/skip'),
     },
     cache: {
-        GapSet: require('./lib/algos/cache/GapSet'),
-        GapCache: require('./lib/algos/cache/GapCache'),
         LRUCache: require('./lib/algos/cache/LRUCache'),
     },
     stream: {
@@ -70,9 +64,6 @@ export const policies = {
     RequestContext,
     requestUtils,
     actionMaps,
-    actionNeedQuotaCheck,
-    actionWithDataDeletion,
-    actionNeedQuotaCheckCopy,
 };
 
 export const testing = {

View File

@@ -1,363 +0,0 @@
import { OrderedSet } from '@js-sdsl/ordered-set';

import {
    default as GapSet,
    GapSetEntry,
} from './GapSet';

// the API is similar but is not strictly a superset of GapSetInterface
// so we don't extend from it
export interface GapCacheInterface {
    exposureDelayMs: number;
    maxGapWeight: number;
    size: number;

    setGap: (firstKey: string, lastKey: string, weight: number) => void;
    removeOverlappingGaps: (overlappingKeys: string[]) => number;
    lookupGap: (minKey: string, maxKey?: string) => Promise<GapSetEntry | null>;
    [Symbol.iterator]: () => Iterator<GapSetEntry>;
    toArray: () => GapSetEntry[];
};

class GapCacheUpdateSet {
    newGaps: GapSet;
    updatedKeys: OrderedSet<string>;

    constructor(maxGapWeight: number) {
        this.newGaps = new GapSet(maxGapWeight);
        this.updatedKeys = new OrderedSet();
    }

    addUpdateBatch(updatedKeys: OrderedSet<string>): void {
        this.updatedKeys.union(updatedKeys);
    }
};

/**
 * Cache of listing "gaps", i.e. ranges of keys that can be skipped
 * over during listing (because they only contain delete markers as
 * latest versions).
 *
 * Typically, a single GapCache instance would be attached to a raft session.
 *
 * The API usage is as follows:
 *
 * - Initialize a GapCache instance by calling start() (this starts an internal timer)
 *
 * - Insert a gap or update an existing one via setGap()
 *
 * - Lookup existing gaps via lookupGap()
 *
 * - Invalidate gaps that overlap a specific set of keys via removeOverlappingGaps()
 *
 * - Shut down a GapCache instance by calling stop() (this stops the internal timer)
 *
 * Gaps inserted via setGap() are not exposed immediately to lookupGap(), but only:
 *
 * - after a certain delay always larger than 'exposureDelayMs' and usually shorter
 *   than twice this value (but might be slightly longer in rare cases)
 *
 * - and only if they haven't been invalidated by a recent call to removeOverlappingGaps()
 *
 * This ensures atomicity between gap creation and invalidation from updates under
 * the condition that a gap is created from first key to last key within the time defined
 * by 'exposureDelayMs'.
 *
 * The implementation is based on two extra temporary "update sets" on top of the main
 * exposed gap set, one called "staging" and the other "frozen", each containing a
 * temporary updated gap set and a list of updated keys to invalidate gaps with (coming
 * from calls to removeOverlappingGaps()). Every "exposureDelayMs" milliseconds, the frozen
 * gaps are invalidated by all key updates coming from either of the "staging" or "frozen"
 * update set, then merged into the exposed gaps set, after which the staging updates become
 * the frozen updates and won't receive any new gap until the next cycle.
 */
export default class GapCache implements GapCacheInterface {
    _exposureDelayMs: number;
    maxGaps: number;

    _stagingUpdates: GapCacheUpdateSet;
    _frozenUpdates: GapCacheUpdateSet;
    _exposedGaps: GapSet;
    _exposeFrozenInterval: NodeJS.Timeout | null;

    /**
     * @constructor
     *
     * @param {number} exposureDelayMs - minimum delay between
     *   insertion of a gap via setGap() and its exposure via
     *   lookupGap()
     * @param {number} maxGaps - maximum number of cached gaps, after
     *   which no new gap can be added by setGap(). (Note: a future
     *   improvement could replace this by an eviction strategy)
     * @param {number} maxGapWeight - maximum "weight" of individual
     *   cached gaps, which is also the granularity for
     *   invalidation. Individual gaps can be chained together,
     *   which lookupGap() transparently consolidates in the response
     *   into a single large gap.
     */
    constructor(exposureDelayMs: number, maxGaps: number, maxGapWeight: number) {
        this._exposureDelayMs = exposureDelayMs;
        this.maxGaps = maxGaps;
        this._stagingUpdates = new GapCacheUpdateSet(maxGapWeight);
        this._frozenUpdates = new GapCacheUpdateSet(maxGapWeight);
        this._exposedGaps = new GapSet(maxGapWeight);
        this._exposeFrozenInterval = null;
    }

    /**
     * Create a GapCache from an array of exposed gap entries (used in tests)
     *
     * @return {GapCache} - a new GapCache instance
     */
    static createFromArray(
        gaps: GapSetEntry[],
        exposureDelayMs: number,
        maxGaps: number,
        maxGapWeight: number
    ): GapCache {
        const gapCache = new GapCache(exposureDelayMs, maxGaps, maxGapWeight);
        gapCache._exposedGaps = GapSet.createFromArray(gaps, maxGapWeight);
        return gapCache;
    }

    /**
     * Internal helper to remove gaps in the staging and frozen sets
     * overlapping with previously updated keys, right before the
     * frozen gaps get exposed.
     *
     * @return {undefined}
     */
    _removeOverlappingGapsBeforeExpose(): void {
        for (const { updatedKeys } of [this._stagingUpdates, this._frozenUpdates]) {
            if (updatedKeys.size() === 0) {
                continue;
            }
            for (const { newGaps } of [this._stagingUpdates, this._frozenUpdates]) {
                if (newGaps.size === 0) {
                    continue;
                }
                newGaps.removeOverlappingGaps(updatedKeys);
            }
        }
    }

    /**
     * This function is the core mechanism that updates the exposed gaps in the
     * cache. It is called on a regular interval defined by 'exposureDelayMs'.
     *
     * It does the following in order:
     *
     * - remove gaps from the frozen set that overlap with any key present in a
     *   batch passed to removeOverlappingGaps() since the last two triggers of
     *   _exposeFrozen()
     *
     * - merge the remaining gaps from the frozen set to the exposed set, which
     *   makes them visible from calls to lookupGap()
     *
     * - rotate by freezing the currently staging updates and initiating a new
     *   staging updates set
     *
     * @return {undefined}
     */
    _exposeFrozen(): void {
        this._removeOverlappingGapsBeforeExpose();
        for (const gap of this._frozenUpdates.newGaps) {
            // Use a trivial strategy to keep the cache size within
            // limits: refuse to add new gaps when the size is above
            // the 'maxGaps' threshold. We solely rely on
            // removeOverlappingGaps() to make space for new gaps.
            if (this._exposedGaps.size < this.maxGaps) {
                this._exposedGaps.setGap(gap.firstKey, gap.lastKey, gap.weight);
            }
        }
        this._frozenUpdates = this._stagingUpdates;
        this._stagingUpdates = new GapCacheUpdateSet(this.maxGapWeight);
    }

    /**
     * Start the internal GapCache timer
     *
     * @return {undefined}
     */
    start(): void {
        if (this._exposeFrozenInterval) {
            return;
        }
        this._exposeFrozenInterval = setInterval(
            () => this._exposeFrozen(),
            this._exposureDelayMs);
    }

    /**
     * Stop the internal GapCache timer
     *
     * @return {undefined}
     */
    stop(): void {
        if (this._exposeFrozenInterval) {
            clearInterval(this._exposeFrozenInterval);
            this._exposeFrozenInterval = null;
        }
    }

    /**
     * Record a gap between two keys, associated with a weight to
     * limit individual gap's spanning ranges in the cache, for a more
     * granular invalidation.
     *
     * The function handles splitting and merging existing gaps to
     * maintain an optimal weight of cache entries.
     *
     * NOTE 1: the caller must ensure that the full length of the gap
     * between 'firstKey' and 'lastKey' has been built from a listing
     * snapshot that is more recent than 'exposureDelayMs' milliseconds,
     * in order to guarantee that the exposed gap will be fully
     * covered (and potentially invalidated) from recent calls to
     * removeOverlappingGaps().
     *
     * NOTE 2: a usual pattern when building a large gap from multiple
     * calls to setGap() is to start the next gap from 'lastKey',
     * which will be passed as 'firstKey' in the next call, so that
     * gaps can be chained together and consolidated by lookupGap().
     *
     * @param {string} firstKey - first key of the gap
     * @param {string} lastKey - last key of the gap, must be greater
     *   than or equal to 'firstKey'
     * @param {number} weight - total weight between 'firstKey' and 'lastKey'
     * @return {undefined}
     */
    setGap(firstKey: string, lastKey: string, weight: number): void {
        this._stagingUpdates.newGaps.setGap(firstKey, lastKey, weight);
    }

    /**
     * Remove gaps that overlap with a given set of keys. Used to
     * invalidate gaps when keys are inserted or deleted.
     *
     * @param {OrderedSet<string> | string[]} overlappingKeys - remove gaps that
     *   overlap with any of this set of keys
     * @return {number} - how many gaps were removed from the exposed
     *   gaps only (overlapping gaps not yet exposed are also invalidated
     *   but are not accounted for in the returned value)
     */
    removeOverlappingGaps(overlappingKeys: OrderedSet<string> | string[]): number {
        let overlappingKeysSet;
        if (Array.isArray(overlappingKeys)) {
            overlappingKeysSet = new OrderedSet(overlappingKeys);
        } else {
            overlappingKeysSet = overlappingKeys;
        }
        this._stagingUpdates.addUpdateBatch(overlappingKeysSet);
        return this._exposedGaps.removeOverlappingGaps(overlappingKeysSet);
    }

    /**
     * Lookup the next exposed gap that overlaps with [minKey, maxKey]. Internally
     * chained gaps are coalesced in the response into a single contiguous large gap.
     *
     * @param {string} minKey - minimum key overlapping with the returned gap
     * @param {string} [maxKey] - maximum key overlapping with the returned gap
     * @return {Promise<GapSetEntry | null>} - result of the lookup if a gap
     *   was found, null otherwise, as a Promise
     */
    lookupGap(minKey: string, maxKey?: string): Promise<GapSetEntry | null> {
        return this._exposedGaps.lookupGap(minKey, maxKey);
    }

    /**
     * Get the maximum weight setting for individual gaps.
     *
     * @return {number} - maximum weight of individual gaps
     */
    get maxGapWeight(): number {
        return this._exposedGaps.maxWeight;
    }

    /**
     * Set the maximum weight setting for individual gaps.
     *
     * @param {number} gapWeight - maximum weight of individual gaps
     */
    set maxGapWeight(gapWeight: number) {
        this._exposedGaps.maxWeight = gapWeight;
        // also update transient gap sets
        this._stagingUpdates.newGaps.maxWeight = gapWeight;
        this._frozenUpdates.newGaps.maxWeight = gapWeight;
    }

    /**
     * Get the exposure delay in milliseconds, which is the minimum
     * time after which newly cached gaps will be exposed by
     * lookupGap().
     *
     * @return {number} - exposure delay in milliseconds
     */
    get exposureDelayMs(): number {
        return this._exposureDelayMs;
    }

    /**
     * Set the exposure delay in milliseconds, which is the minimum
     * time after which newly cached gaps will be exposed by
     * lookupGap(). Setting this attribute automatically updates the
     * internal state to honor the new value.
     *
     * @param {number} exposureDelayMs - exposure delay in milliseconds
     */
    set exposureDelayMs(exposureDelayMs: number) {
        if (exposureDelayMs !== this._exposureDelayMs) {
            this._exposureDelayMs = exposureDelayMs;
            if (this._exposeFrozenInterval) {
                // invalidate all pending gap updates, as the new interval may not be
                // safe for them
                this._stagingUpdates = new GapCacheUpdateSet(this.maxGapWeight);
                this._frozenUpdates = new GapCacheUpdateSet(this.maxGapWeight);

                // reinitialize the _exposeFrozenInterval timer with the updated delay
                this.stop();
                this.start();
            }
        }
    }

    /**
     * Get the number of exposed gaps
     *
     * @return {number} number of exposed gaps
     */
    get size(): number {
        return this._exposedGaps.size;
    }

    /**
     * Iterate over exposed gaps
     *
     * @return {Iterator<GapSetEntry>} an iterator over exposed gaps
     */
    [Symbol.iterator](): Iterator<GapSetEntry> {
        return this._exposedGaps[Symbol.iterator]();
    }

    /**
     * Get an array of all exposed gaps
     *
     * @return {GapSetEntry[]} array of exposed gaps
     */
    toArray(): GapSetEntry[] {
        return this._exposedGaps.toArray();
    }

    /**
     * Clear all exposed and staging gaps from the cache.
     *
     * Note: retains invalidating updates from removeOverlappingGaps()
     * for correctness of gaps inserted afterwards.
     *
     * @return {undefined}
     */
    clear(): void {
        this._stagingUpdates.newGaps = new GapSet(this.maxGapWeight);
        this._frozenUpdates.newGaps = new GapSet(this.maxGapWeight);
        this._exposedGaps = new GapSet(this.maxGapWeight);
    }
}
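
The call sequence outlined in the class comment can be summarized with a short usage sketch; the import path, parameter values, and keys below are illustrative assumptions only:

```typescript
// Usage sketch of the GapCache API described in the class comment above.
import GapCache from './GapCache';

const cache = new GapCache(100, 1000, 10); // exposureDelayMs, maxGaps, maxGapWeight
cache.start();

// Build one large logical gap with two chained setGap() calls, reusing the
// previous 'lastKey' as the next 'firstKey' (the pattern from NOTE 2).
cache.setGap('key001', 'key050', 10);
cache.setGap('key050', 'key100', 10);

// Key updates invalidate overlapping gaps; this batch does not overlap the
// gap above, so the gap survives the exposure cycle.
cache.removeOverlappingGaps(['other/updated-key']);

// Gaps become visible to lookupGap() after one to roughly two times
// exposureDelayMs, once they have been merged into the exposed set.
setTimeout(async () => {
    const gap = await cache.lookupGap('key001', 'key200');
    // expected: the two chained gaps coalesced into a single entry,
    // e.g. { firstKey: 'key001', lastKey: 'key100', weight: 20 }
    console.log(gap);
    cache.stop();
}, 250);
```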

View File

@ -1,366 +0,0 @@
import assert from 'assert';
import { OrderedSet } from '@js-sdsl/ordered-set';
import errors from '../../errors';
export type GapSetEntry = {
firstKey: string,
lastKey: string,
weight: number,
};
export interface GapSetInterface {
maxWeight: number;
size: number;
setGap: (firstKey: string, lastKey: string, weight: number) => GapSetEntry;
removeOverlappingGaps: (overlappingKeys: string[]) => number;
lookupGap: (minKey: string, maxKey?: string) => Promise<GapSetEntry | null>;
[Symbol.iterator]: () => Iterator<GapSetEntry>;
toArray: () => GapSetEntry[];
};
/**
* Specialized data structure to support caching of listing "gaps",
* i.e. ranges of keys that can be skipped over during listing
* (because they only contain delete markers as latest versions)
*/
export default class GapSet implements GapSetInterface, Iterable<GapSetEntry> {
_gaps: OrderedSet<GapSetEntry>;
_maxWeight: number;
/**
* @constructor
* @param {number} maxWeight - weight threshold for each cached
* gap (unitless). Triggers splitting gaps when reached
*/
constructor(maxWeight: number) {
this._gaps = new OrderedSet(
[],
(left: GapSetEntry, right: GapSetEntry) => (
left.firstKey < right.firstKey ? -1 :
left.firstKey > right.firstKey ? 1 : 0
)
);
this._maxWeight = maxWeight;
}
/**
* Create a GapSet from an array of gap entries (used in tests)
*/
static createFromArray(gaps: GapSetEntry[], maxWeight: number): GapSet {
const gapSet = new GapSet(maxWeight);
for (const gap of gaps) {
gapSet._gaps.insert(gap);
}
return gapSet;
}
/**
* Record a gap between two keys, associated with a weight to limit
* individual gap sizes in the cache.
*
* The function handles splitting and merging existing gaps to
* maintain an optimal weight of cache entries.
*
* @param {string} firstKey - first key of the gap
* @param {string} lastKey - last key of the gap, must be greater
* than or equal to 'firstKey'
* @param {number} weight - total weight between 'firstKey' and 'lastKey'
* @return {GapSetEntry} - existing or new gap entry
*/
setGap(firstKey: string, lastKey: string, weight: number): GapSetEntry {
assert(lastKey >= firstKey);
// Step 1/4: Find the closest left-overlapping gap if it exists, and
// either re-use it or chain it with a new gap depending on the weights
// (otherwise just create a new gap).
const curGapIt = this._gaps.reverseLowerBound(<GapSetEntry>{ firstKey });
let curGap;
if (curGapIt.isAccessible()) {
curGap = curGapIt.pointer;
if (curGap.lastKey >= lastKey) {
// return fully overlapping gap already cached
return curGap;
}
}
let remainingWeight = weight;
if (!curGap // no previous gap
|| curGap.lastKey < firstKey // previous gap not overlapping
|| (curGap.lastKey === firstKey // previous gap overlapping by one key...
&& curGap.weight + weight > this._maxWeight) // ...but we can't extend it
) {
// create a new gap indexed by 'firstKey'
curGap = { firstKey, lastKey: firstKey, weight: 0 };
this._gaps.insert(curGap);
} else if (curGap.lastKey > firstKey && weight > this._maxWeight) {
// previous gap is either fully or partially contained in the new gap
// and cannot be extended: subtract its weight from the total (heuristic
// in case the previous gap doesn't start at 'firstKey', which is the
// uncommon case)
remainingWeight -= curGap.weight;
// there may be an existing chained gap starting with the previous gap's
// 'lastKey': use it if it exists
const chainedGapIt = this._gaps.find(<GapSetEntry>{ firstKey: curGap.lastKey });
if (chainedGapIt.isAccessible()) {
curGap = chainedGapIt.pointer;
} else {
// no existing chained gap: chain a new gap to the previous gap
curGap = {
firstKey: curGap.lastKey,
lastKey: curGap.lastKey,
weight: 0,
};
this._gaps.insert(curGap);
}
}
// Step 2/4: Cleanup existing gaps fully included in firstKey -> lastKey, and
// aggregate their weights in curGap to define the minimum weight up to the
// last merged gap.
let nextGap;
while (true) {
const nextGapIt = this._gaps.upperBound(<GapSetEntry>{ firstKey: curGap.firstKey });
nextGap = nextGapIt.isAccessible() && nextGapIt.pointer;
// stop the cleanup when no more gap or if the next gap is not fully
// included in curGap
if (!nextGap || nextGap.lastKey > lastKey) {
break;
}
this._gaps.eraseElementByIterator(nextGapIt);
curGap.lastKey = nextGap.lastKey;
curGap.weight += nextGap.weight;
}
// Step 3/4: Extend curGap to lastKey, adjusting the weight.
// At this point, curGap weight is the minimum weight of the finished gap, save it
// for step 4.
let minMergedWeight = curGap.weight;
if (curGap.lastKey === firstKey && firstKey !== lastKey) {
// extend the existing gap by the full amount 'firstKey -> lastKey'
curGap.lastKey = lastKey;
curGap.weight += remainingWeight;
} else if (curGap.lastKey <= lastKey) {
curGap.lastKey = lastKey;
curGap.weight = remainingWeight;
}
// Step 4/4: Find the closest right-overlapping gap, and if it exists, either merge
// it or chain it with curGap depending on the weights.
if (nextGap && nextGap.firstKey <= lastKey) {
// nextGap overlaps with the new gap: check if we can merge it
minMergedWeight += nextGap.weight;
let mergedWeight;
if (lastKey === nextGap.firstKey) {
// nextGap is chained with curGap: add the full weight of nextGap
mergedWeight = curGap.weight + nextGap.weight;
} else {
// strict overlap: don't add nextGap's weight unless
// it's larger than the sum of merged ranges (as it is
// then included in `minMergedWeight`)
mergedWeight = Math.max(curGap.weight, minMergedWeight);
}
if (mergedWeight <= this._maxWeight) {
// merge nextGap into curGap
curGap.lastKey = nextGap.lastKey;
curGap.weight = mergedWeight;
this._gaps.eraseElementByKey(nextGap);
} else {
// adjust the last key to chain with nextGap and subtract the next
// gap's weight from curGap (heuristic)
curGap.lastKey = nextGap.firstKey;
curGap.weight = Math.max(mergedWeight - nextGap.weight, 0);
curGap = nextGap;
}
}
// return a copy of curGap
return Object.assign({}, curGap);
}
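// A worked example of the four steps above (illustration, not part of the
// original file): with maxWeight=10, setGap('a', 'e', 6) followed by
// setGap('e', 'k', 6) chains two gaps {a..e, w=6} and {e..k, w=6} instead
// of merging them, since the merged weight 12 would exceed maxWeight; a
// later lookupGap('a') coalesces the chain and returns
// { firstKey: 'a', lastKey: 'k', weight: 12 }.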
/**
* Remove gaps that overlap with one or more keys in a given array or
* OrderedSet. Used to invalidate gaps when keys are inserted or deleted.
*
* @param {OrderedSet<string> | string[]} overlappingKeys - remove gaps that overlap
* with any of this set of keys
* @return {number} - how many gaps were removed
*/
removeOverlappingGaps(overlappingKeys: OrderedSet<string> | string[]): number {
// To optimize processing with a large number of keys and/or gaps, this function:
//
// 1. converts the overlappingKeys array to an OrderedSet (if not already one)
// 2. queries both the gaps set and the overlapping keys set in a loop, which allows:
// - skipping ranges of overlapping keys at once when there is no new overlapping gap
// - skipping ranges of gaps at once when there is no overlapping key
//
// This way, it is efficient when the number of non-overlapping gaps is large
// (which is the most common case in practice).
let overlappingKeysSet;
if (Array.isArray(overlappingKeys)) {
overlappingKeysSet = new OrderedSet(overlappingKeys);
} else {
overlappingKeysSet = overlappingKeys;
}
const firstKeyIt = overlappingKeysSet.begin();
let currentKey = firstKeyIt.isAccessible() && firstKeyIt.pointer;
let nRemoved = 0;
while (currentKey) {
const closestGapIt = this._gaps.reverseUpperBound(<GapSetEntry>{ firstKey: currentKey });
if (closestGapIt.isAccessible()) {
const closestGap = closestGapIt.pointer;
if (currentKey <= closestGap.lastKey) {
// currentKey overlaps closestGap: remove the gap
this._gaps.eraseElementByIterator(closestGapIt);
nRemoved += 1;
}
}
const nextGapIt = this._gaps.lowerBound(<GapSetEntry>{ firstKey: currentKey });
if (!nextGapIt.isAccessible()) {
// no more gap: we're done
return nRemoved;
}
const nextGap = nextGapIt.pointer;
// advance to the last key potentially overlapping with nextGap
let currentKeyIt = overlappingKeysSet.reverseLowerBound(nextGap.lastKey);
if (currentKeyIt.isAccessible()) {
currentKey = currentKeyIt.pointer;
if (currentKey >= nextGap.firstKey) {
// currentKey overlaps nextGap: remove the gap
this._gaps.eraseElementByIterator(nextGapIt);
nRemoved += 1;
}
}
// advance to the first key potentially overlapping with another gap
currentKeyIt = overlappingKeysSet.lowerBound(nextGap.lastKey);
currentKey = currentKeyIt.isAccessible() && currentKeyIt.pointer;
}
return nRemoved;
}
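// Illustration (not part of the original file): with gaps {a..e} and
// {g..k} cached, removeOverlappingGaps(['c', 'z']) removes only {a..e}
// and returns 1: 'c' falls inside the first gap, while 'z' lies past
// both gaps and overlaps neither.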
/**
* Internal helper to coalesce multiple chained gaps into a single gap.
*
* It is only used to construct lookupGap() return values and
* doesn't modify the GapSet.
*
* NOTE: The function may take a noticeable amount of time and CPU
* to execute if a large number of chained gaps have to be
* coalesced, but it should never take more than a few seconds. In
* most cases it should take less than a millisecond. It regularly
* yields to the nodejs event loop to avoid blocking it during a
* long execution.
*
* @param {GapSetEntry} firstGap - first gap of the chain to coalesce with
* the next ones in the chain
* @return {Promise<GapSetEntry>} - a new coalesced entry, as a Promise
*/
_coalesceGapChain(firstGap: GapSetEntry): Promise<GapSetEntry> {
return new Promise(resolve => {
const coalescedGap: GapSetEntry = Object.assign({}, firstGap);
const coalesceGapChainIteration = () => {
// efficiency trade-off: 100 iterations of log(N) complexity lookups should
// not block the event loop for too long
for (let opCounter = 0; opCounter < 100; ++opCounter) {
const chainedGapIt = this._gaps.find(
<GapSetEntry>{ firstKey: coalescedGap.lastKey });
if (!chainedGapIt.isAccessible()) {
// chain is complete
return resolve(coalescedGap);
}
const chainedGap = chainedGapIt.pointer;
if (chainedGap.firstKey === chainedGap.lastKey) {
// found a single-key gap: chain is complete
return resolve(coalescedGap);
}
coalescedGap.lastKey = chainedGap.lastKey;
coalescedGap.weight += chainedGap.weight;
}
// yield to the event loop before continuing the process
// of coalescing the gap chain
return process.nextTick(coalesceGapChainIteration);
};
coalesceGapChainIteration();
});
}
/**
* Lookup the next gap that overlaps with [minKey, maxKey]. Internally chained
* gaps are coalesced in the response into a single contiguous large gap.
*
* @param {string} minKey - minimum key overlapping with the returned gap
* @param {string} [maxKey] - maximum key overlapping with the returned gap
* @return {Promise<GapSetEntry | null>} - result of the lookup if a gap
* was found, null otherwise, as a Promise
*/
async lookupGap(minKey: string, maxKey?: string): Promise<GapSetEntry | null> {
let firstGap: GapSetEntry | null = null;
const minGapIt = this._gaps.reverseLowerBound(<GapSetEntry>{ firstKey: minKey });
const minGap = minGapIt.isAccessible() && minGapIt.pointer;
if (minGap && minGap.lastKey >= minKey) {
firstGap = minGap;
} else {
const maxGapIt = this._gaps.upperBound(<GapSetEntry>{ firstKey: minKey });
const maxGap = maxGapIt.isAccessible() && maxGapIt.pointer;
if (maxGap && (maxKey === undefined || maxGap.firstKey <= maxKey)) {
firstGap = maxGap;
}
}
if (!firstGap) {
return null;
}
return this._coalesceGapChain(firstGap);
}
/**
* Get the maximum weight setting for individual gaps.
*
* @return {number} - maximum weight of individual gaps
*/
get maxWeight(): number {
return this._maxWeight;
}
/**
* Set the maximum weight setting for individual gaps.
*
* @param {number} gapWeight - maximum weight of individual gaps
*/
set maxWeight(gapWeight: number) {
this._maxWeight = gapWeight;
}
/**
* Get the number of gaps stored in this set.
*
* @return {number} - number of gaps stored in this set
*/
get size(): number {
return this._gaps.size();
}
/**
* Iterate over each gap of the set, ordered by first key
*
* @return {Iterator<GapSetEntry>} - an iterator over all gaps
* Example:
* for (const gap of myGapSet) { ... }
*/
[Symbol.iterator](): Iterator<GapSetEntry> {
return this._gaps[Symbol.iterator]();
}
/**
* Return an array containing all gaps, ordered by first key
*
* NOTE: there is a toArray() method in the OrderedSet implementation
* but it does not scale well and overflows the stack quickly. This is
* why we provide an implementation based on an iterator.
*
* @return {GapSetEntry[]} - an array containing all gaps
*/
toArray(): GapSetEntry[] {
return [...this];
}
}
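A short usage sketch of the class above (keys and weights are illustrative):

import GapSet from './GapSet';

async function demo() {
    const gapSet = new GapSet(10);
    gapSet.setGap('bar', 'baz', 4);
    // extends the previous gap, since the combined weight 9 fits maxWeight
    gapSet.setGap('baz', 'qux', 5);
    const gap = await gapSet.lookupGap('bar');
    // gap is { firstKey: 'bar', lastKey: 'qux', weight: 9 }
    gapSet.removeOverlappingGaps(['baz']); // returns 1, the set is now empty
}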

View File

@ -1,6 +1,6 @@
 'use strict'; // eslint-disable-line strict

-const { FILTER_ACCEPT, SKIP_NONE } = require('./tools');
+const { FILTER_SKIP, SKIP_NONE } = require('./tools');

 // Use a heuristic to amortize the cost of JSON
 // serialization/deserialization only on largest metadata where the
@ -92,26 +92,21 @@ class Extension {
      * @param {object} entry - a listing entry from metadata
      *                         expected format: { key, value }
      * @return {number} - result of filtering the entry:
-     *                    FILTER_ACCEPT: entry is accepted and may or may not
-     *                    be included in the result
-     *                    FILTER_SKIP: listing may skip directly (with "gte"
-     *                    param) to the key returned by the skipping() method
-     *                    FILTER_END: the results are complete, listing can be stopped
+     *                    > 0: entry is accepted and included in the result
+     *                    = 0: entry is accepted but not included (skipping)
+     *                    < 0: entry is not accepted, listing should finish
      */
-    filter(/* entry: { key, value } */) {
-        return FILTER_ACCEPT;
+    filter(entry) {
+        return entry ? FILTER_SKIP : FILTER_SKIP;
     }

     /**
-     * Provides the next key at which the listing task is allowed to skip to.
-     * This could allow skipping over:
-     * - a key prefix ending with the delimiter
-     * - all remaining versions of an object when doing a current
-     *   versions listing in v0 format
-     * - a cached "gap" of deleted objects when doing a current
-     *   versions listing in v0 format
+     * Provides insight into why the filter is skipping an entry. This could
+     * be because it is skipping a range of delimited keys or a range of
+     * specific versions when doing master version listing.
      *
-     * @return {string} - the next key at which the listing task is allowed to skip to
+     * @return {string} - the insight: a common prefix or a master key,
+     *                    or SKIP_NONE if there is no insight
      */
     skipping() {
         return SKIP_NONE;

View File

@ -1,7 +1,7 @@
 'use strict'; // eslint-disable-line strict

 const { inc, checkLimit, listingParamsMasterKeysV0ToV1,
-    FILTER_END, FILTER_ACCEPT, SKIP_NONE } = require('./tools');
+    FILTER_END, FILTER_ACCEPT } = require('./tools');
 const DEFAULT_MAX_KEYS = 1000;
 const VSConst = require('../../versioning/constants').VersioningConstants;
 const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
@ -163,7 +163,7 @@ class MultipartUploads {
     }

     skipping() {
-        return SKIP_NONE;
+        return '';
     }

     /**

View File

@ -2,7 +2,7 @@

 const Extension = require('./Extension').default;

-const { checkLimit, FILTER_END, FILTER_ACCEPT } = require('./tools');
+const { checkLimit, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
 const DEFAULT_MAX_KEYS = 10000;

 /**
@ -91,7 +91,7 @@ class List extends Extension {
      *          < 0 : listing done
      */
     filter(elem) {
-        // Check if the result array is full
+        // Check first in case of maxkeys <= 0
         if (this.keys >= this.maxKeys) {
             return FILTER_END;
         }
@ -99,7 +99,7 @@ class List extends Extension {
             this.filterKeyStartsWith !== undefined) &&
             typeof elem === 'object' &&
             !this.customFilter(elem.value)) {
-            return FILTER_ACCEPT;
+            return FILTER_SKIP;
         }
         if (typeof elem === 'object') {
             this.res.push({

274
lib/algos/list/delimiter.js Normal file
View File

@ -0,0 +1,274 @@
'use strict'; // eslint-disable-line strict
const Extension = require('./Extension').default;
const { inc, listingParamsMasterKeysV0ToV1,
FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
const VSConst = require('../../versioning/constants').VersioningConstants;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
/**
* Find the common prefix in the path
*
* @param {String} key - path of the object
* @param {String} delimiter - separator
* @param {Number} delimiterIndex - 'folder' index in the path
* @return {String} - CommonPrefix
*/
function getCommonPrefix(key, delimiter, delimiterIndex) {
return key.substring(0, delimiterIndex + delimiter.length);
}
/**
* Handle object listing with parameters
*
* @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
* @prop {String[]} Contents - 'files' to list
* @prop {Boolean} IsTruncated - truncated listing flag
* @prop {String|undefined} NextMarker - marker per amazon format
* @prop {Number} keys - count of listed keys
* @prop {String|undefined} delimiter - separator per amazon format
* @prop {String|undefined} prefix - prefix per amazon format
* @prop {Number} maxKeys - number of keys to list
*/
class Delimiter extends Extension {
/**
* Create a new Delimiter instance
* @constructor
* @param {Object} parameters - listing parameters
* @param {String} [parameters.delimiter] - delimiter per amazon
* format
* @param {String} [parameters.prefix] - prefix per amazon
* format
* @param {String} [parameters.marker] - marker per amazon
* format
* @param {Number} [parameters.maxKeys] - number of keys to list
* @param {Boolean} [parameters.v2] - indicates whether the v2
* listing format is used
* @param {String} [parameters.startAfter] - marker per amazon
* v2 format
* @param {String} [parameters.continuationToken] - obfuscated amazon
* token
* @param {Boolean} [parameters.alphabeticalOrder] - whether the result
* should be alphabetically ordered
* @param {RequestLogger} logger - The logger of the
* request
* @param {String} [vFormat] - versioning key format
*/
constructor(parameters, logger, vFormat) {
super(parameters, logger);
// original listing parameters
this.delimiter = parameters.delimiter;
this.prefix = parameters.prefix;
this.marker = parameters.marker;
this.maxKeys = parameters.maxKeys || 1000;
this.startAfter = parameters.startAfter;
this.continuationToken = parameters.continuationToken;
this.alphabeticalOrder =
typeof parameters.alphabeticalOrder !== 'undefined' ?
parameters.alphabeticalOrder : true;
this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
// results
this.CommonPrefixes = [];
this.Contents = [];
this.IsTruncated = false;
this.NextMarker = parameters.marker;
this.NextContinuationToken =
parameters.continuationToken || parameters.startAfter;
this.startMarker = parameters.v2 ? 'startAfter' : 'marker';
this.continueMarker = parameters.v2 ? 'continuationToken' : 'marker';
this.nextContinueMarker = parameters.v2 ?
'NextContinuationToken' : 'NextMarker';
if (this.delimiter !== undefined &&
this[this.nextContinueMarker] !== undefined &&
this[this.nextContinueMarker].startsWith(this.prefix || '')) {
const nextDelimiterIndex =
this[this.nextContinueMarker].indexOf(this.delimiter,
this.prefix ? this.prefix.length : 0);
this[this.nextContinueMarker] =
this[this.nextContinueMarker].slice(0, nextDelimiterIndex +
this.delimiter.length);
}
Object.assign(this, {
[BucketVersioningKeyFormat.v0]: {
genMDParams: this.genMDParamsV0,
getObjectKey: this.getObjectKeyV0,
skipping: this.skippingV0,
},
[BucketVersioningKeyFormat.v1]: {
genMDParams: this.genMDParamsV1,
getObjectKey: this.getObjectKeyV1,
skipping: this.skippingV1,
},
}[this.vFormat]);
}
genMDParamsV0() {
const params = {};
if (this.prefix) {
params.gte = this.prefix;
params.lt = inc(this.prefix);
}
const startVal = this[this.continueMarker] || this[this.startMarker];
if (startVal) {
if (params.gte && params.gte > startVal) {
return params;
}
delete params.gte;
params.gt = startVal;
}
return params;
}
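// Illustration (not part of the original file): with prefix 'photos/'
// and no marker, genMDParamsV0() returns { gte: 'photos/', lt: 'photos0' },
// inc() bumping the last character to form an exclusive upper bound;
// with marker 'photos/cat.jpg' it returns
// { gt: 'photos/cat.jpg', lt: 'photos0' } instead.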
genMDParamsV1() {
const params = this.genMDParamsV0();
return listingParamsMasterKeysV0ToV1(params);
}
/**
* check if the max keys count has been reached and set the
* final state of the result if it is the case
* @return {Boolean} - indicates if the iteration has to stop
*/
_reachedMaxKeys() {
if (this.keys >= this.maxKeys) {
// In cases of maxKeys <= 0 -> IsTruncated = false
this.IsTruncated = this.maxKeys > 0;
return true;
}
return false;
}
/**
* Add a (key, value) tuple to the listing
* Set the NextMarker to the current key
* Increment the keys counter
* @param {String} key - The key to add
* @param {String} value - The value of the key
* @return {number} - indicates if iteration should continue
*/
addContents(key, value) {
if (this._reachedMaxKeys()) {
return FILTER_END;
}
this.Contents.push({ key, value: this.trimMetadata(value) });
this[this.nextContinueMarker] = key;
++this.keys;
return FILTER_ACCEPT;
}
getObjectKeyV0(obj) {
return obj.key;
}
getObjectKeyV1(obj) {
return obj.key.slice(DbPrefixes.Master.length);
}
/**
* Filter to apply on each iteration, based on:
* - prefix
* - delimiter
* - maxKeys
* The marker is being handled directly by levelDB
* @param {Object} obj - The key and value of the element
* @param {String} obj.key - The key of the element
* @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue
*/
filter(obj) {
const key = this.getObjectKey(obj);
const value = obj.value;
if ((this.prefix && !key.startsWith(this.prefix))
|| (this.alphabeticalOrder
&& typeof this[this.nextContinueMarker] === 'string'
&& key <= this[this.nextContinueMarker])) {
return FILTER_SKIP;
}
if (this.delimiter) {
const baseIndex = this.prefix ? this.prefix.length : 0;
const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
if (delimiterIndex === -1) {
return this.addContents(key, value);
}
return this.addCommonPrefix(key, delimiterIndex);
}
return this.addContents(key, value);
}
/**
* Add a Common Prefix in the list
* @param {String} key - object name
* @param {Number} index - after prefix starting point
* @return {number} - indicates if iteration should continue
*/
addCommonPrefix(key, index) {
const commonPrefix = getCommonPrefix(key, this.delimiter, index);
if (this.CommonPrefixes.indexOf(commonPrefix) === -1
&& this[this.nextContinueMarker] !== commonPrefix) {
if (this._reachedMaxKeys()) {
return FILTER_END;
}
this.CommonPrefixes.push(commonPrefix);
this[this.nextContinueMarker] = commonPrefix;
++this.keys;
return FILTER_ACCEPT;
}
return FILTER_SKIP;
}
/**
* Provides the key from which repd may skip the current listing range,
* for a bucket in v0 versioning key format.
*
* @return {string} - the present range (NextMarker) that the listing
* may skip past if it decides to move on
*/
skippingV0() {
return this[this.nextContinueMarker];
}
/**
* Provides the key from which repd may skip the current listing range,
* for a bucket in v1 versioning key format.
*
* @return {string} - the present range (NextMarker), prefixed with the
* master-key prefix, that the listing may skip past if it decides to move on
*/
skippingV1() {
return DbPrefixes.Master + this[this.nextContinueMarker];
}
/**
* Return an object containing all mandatory fields to use once the
iteration is done; the NextMarker field is omitted if the output
* isn't truncated
* @return {Object} - following amazon format
*/
result() {
/* NextMarker is only provided when delimiter is used.
* specified in v1 listing documentation
* http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
*/
const result = {
CommonPrefixes: this.CommonPrefixes,
Contents: this.Contents,
IsTruncated: this.IsTruncated,
Delimiter: this.delimiter,
};
if (this.parameters.v2) {
result.NextContinuationToken = this.IsTruncated
? this.NextContinuationToken : undefined;
} else {
result.NextMarker = (this.IsTruncated && this.delimiter)
? this.NextMarker : undefined;
}
return result;
}
}
module.exports = { Delimiter };
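A minimal driving loop for the class above (a sketch; the no-op logger and the sample entries are illustrative, real callers are the metadata backends):

const { Delimiter } = require('./delimiter');

const logger = { info: () => {}, warn: () => {}, error: () => {} };
const listing = new Delimiter({ delimiter: '/', maxKeys: 1000 }, logger);
const entries = [
    { key: 'doc/readme.md', value: '{}' },
    { key: 'img/a.png', value: '{}' },
    { key: 'img/b.png', value: '{}' },
];
for (const entry of entries) {
    if (listing.filter(entry) < 0) { // FILTER_END: maxKeys reached
        break;
    }
}
// -> { CommonPrefixes: ['doc/', 'img/'], Contents: [],
//      IsTruncated: false, Delimiter: '/', NextMarker: undefined }
console.log(listing.result());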

View File

@ -1,356 +0,0 @@
'use strict'; // eslint-disable-line strict
const Extension = require('./Extension').default;
const { inc, listingParamsMasterKeysV0ToV1,
FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools');
const VSConst = require('../../versioning/constants').VersioningConstants;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
export interface FilterState {
id: number,
};
export interface FilterReturnValue {
FILTER_ACCEPT,
FILTER_SKIP,
FILTER_END,
};
export const enum DelimiterFilterStateId {
NotSkipping = 1,
SkippingPrefix = 2,
};
export interface DelimiterFilterState_NotSkipping extends FilterState {
id: DelimiterFilterStateId.NotSkipping,
};
export interface DelimiterFilterState_SkippingPrefix extends FilterState {
id: DelimiterFilterStateId.SkippingPrefix,
prefix: string;
};
type KeyHandler = (key: string, value: string) => FilterReturnValue;
export type ResultObject = {
CommonPrefixes: string[];
Contents: {
key: string;
value: string;
}[];
IsTruncated: boolean;
Delimiter ?: string;
NextMarker ?: string;
NextContinuationToken ?: string;
};
/**
* Handle object listing with parameters
*
* @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
* @prop {String[]} Contents - 'files' to list
* @prop {Boolean} IsTruncated - truncated listing flag
* @prop {String|undefined} NextMarker - marker per amazon format
* @prop {Number} keys - count of listed keys
* @prop {String|undefined} delimiter - separator per amazon format
* @prop {String|undefined} prefix - prefix per amazon format
* @prop {Number} maxKeys - number of keys to list
*/
export class Delimiter extends Extension {
state: FilterState;
keyHandlers: { [id: number]: KeyHandler };
/**
* Create a new Delimiter instance
* @constructor
* @param {Object} parameters - listing parameters
* @param {String} [parameters.delimiter] - delimiter per amazon
* format
* @param {String} [parameters.prefix] - prefix per amazon
* format
* @param {String} [parameters.marker] - marker per amazon
* format
* @param {Number} [parameters.maxKeys] - number of keys to list
* @param {Boolean} [parameters.v2] - indicates whether the v2
* listing format is used
* @param {String} [parameters.startAfter] - marker per amazon
* format
* @param {String} [parameters.continuationToken] - obfuscated amazon
* token
* @param {RequestLogger} logger - The logger of the
* request
* @param {String} [vFormat] - versioning key format
*/
constructor(parameters, logger, vFormat) {
super(parameters, logger);
// original listing parameters
this.delimiter = parameters.delimiter;
this.prefix = parameters.prefix;
this.maxKeys = parameters.maxKeys || 1000;
if (parameters.v2) {
this.marker = parameters.continuationToken || parameters.startAfter;
} else {
this.marker = parameters.marker;
}
this.nextMarker = this.marker;
this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
// results
this.CommonPrefixes = [];
this.Contents = [];
this.IsTruncated = false;
this.keyHandlers = {};
Object.assign(this, {
[BucketVersioningKeyFormat.v0]: {
genMDParams: this.genMDParamsV0,
getObjectKey: this.getObjectKeyV0,
skipping: this.skippingV0,
},
[BucketVersioningKeyFormat.v1]: {
genMDParams: this.genMDParamsV1,
getObjectKey: this.getObjectKeyV1,
skipping: this.skippingV1,
},
}[this.vFormat]);
// if there is a delimiter, we may skip ranges by prefix,
// hence using the NotSkippingPrefix flavor that checks the
// subprefix up to the delimiter for the NotSkipping state
if (this.delimiter) {
this.setKeyHandler(
DelimiterFilterStateId.NotSkipping,
this.keyHandler_NotSkippingPrefix.bind(this));
} else {
// listing without a delimiter never has to skip over any
// prefix -> use NeverSkipping flavor for the NotSkipping
// state
this.setKeyHandler(
DelimiterFilterStateId.NotSkipping,
this.keyHandler_NeverSkipping.bind(this));
}
this.setKeyHandler(
DelimiterFilterStateId.SkippingPrefix,
this.keyHandler_SkippingPrefix.bind(this));
this.state = <DelimiterFilterState_NotSkipping> {
id: DelimiterFilterStateId.NotSkipping,
};
}
genMDParamsV0() {
const params: { gt ?: string, gte ?: string, lt ?: string } = {};
if (this.prefix) {
params.gte = this.prefix;
params.lt = inc(this.prefix);
}
if (this.marker && this.delimiter) {
const commonPrefix = this.getCommonPrefix(this.marker);
if (commonPrefix) {
const afterPrefix = inc(commonPrefix);
if (!params.gte || afterPrefix > params.gte) {
params.gte = afterPrefix;
}
}
}
if (this.marker && (!params.gte || this.marker >= params.gte)) {
delete params.gte;
params.gt = this.marker;
}
return params;
}
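// Illustration (not part of the original file): with delimiter '/',
// no prefix and marker 'photos/cat.jpg', the common prefix 'photos/' has
// already been reported, so the params become { gte: inc('photos/') },
// i.e. { gte: 'photos0' }, letting the database skip the whole 'photos/'
// range in one seek.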
genMDParamsV1() {
const params = this.genMDParamsV0();
return listingParamsMasterKeysV0ToV1(params);
}
/**
* check if the max keys count has been reached and set the
* final state of the result if it is the case
* @return {Boolean} - indicates if the iteration has to stop
*/
_reachedMaxKeys(): boolean {
if (this.keys >= this.maxKeys) {
// In cases of maxKeys <= 0 -> IsTruncated = false
this.IsTruncated = this.maxKeys > 0;
return true;
}
return false;
}
/**
* Add a (key, value) tuple to the listing
* Set the nextMarker to the current key
* Increment the keys counter
* @param {String} key - The key to add
* @param {String} value - The value of the key
* @return {undefined}
*/
addContents(key: string, value: string): void {
this.Contents.push({ key, value: this.trimMetadata(value) });
++this.keys;
this.nextMarker = key;
}
getCommonPrefix(key: string): string | undefined {
if (!this.delimiter) {
return undefined;
}
const baseIndex = this.prefix ? this.prefix.length : 0;
const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
if (delimiterIndex === -1) {
return undefined;
}
return key.substring(0, delimiterIndex + this.delimiter.length);
}
/**
* Add a Common Prefix in the list
* @param {String} commonPrefix - common prefix to add
* @param {String} key - full key starting with commonPrefix
* @return {undefined}
*/
addCommonPrefix(commonPrefix: string, key: string): void {
// add the new prefix to the list
this.CommonPrefixes.push(commonPrefix);
++this.keys;
this.nextMarker = commonPrefix;
}
addCommonPrefixOrContents(key: string, value: string): string | undefined {
// add the subprefix to the common prefixes if the key has the delimiter
const commonPrefix = this.getCommonPrefix(key);
if (commonPrefix) {
this.addCommonPrefix(commonPrefix, key);
return commonPrefix;
}
this.addContents(key, value);
return undefined;
}
getObjectKeyV0(obj: { key: string }): string {
return obj.key;
}
getObjectKeyV1(obj: { key: string }): string {
return obj.key.slice(DbPrefixes.Master.length);
}
/**
* Filter to apply on each iteration, based on:
* - prefix
* - delimiter
* - maxKeys
* The marker is being handled directly by levelDB
* @param {Object} obj - The key and value of the element
* @param {String} obj.key - The key of the element
* @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue
*/
filter(obj: { key: string, value: string }): FilterReturnValue {
const key = this.getObjectKey(obj);
const value = obj.value;
return this.handleKey(key, value);
}
setState(state: FilterState): void {
this.state = state;
}
setKeyHandler(stateId: number, keyHandler: KeyHandler): void {
this.keyHandlers[stateId] = keyHandler;
}
handleKey(key: string, value: string): FilterReturnValue {
return this.keyHandlers[this.state.id](key, value);
}
keyHandler_NeverSkipping(key: string, value: string): FilterReturnValue {
if (this._reachedMaxKeys()) {
return FILTER_END;
}
this.addContents(key, value);
return FILTER_ACCEPT;
}
keyHandler_NotSkippingPrefix(key: string, value: string): FilterReturnValue {
if (this._reachedMaxKeys()) {
return FILTER_END;
}
const commonPrefix = this.addCommonPrefixOrContents(key, value);
if (commonPrefix) {
// transition into SkippingPrefix state to skip all following keys
// while they start with the same prefix
this.setState(<DelimiterFilterState_SkippingPrefix> {
id: DelimiterFilterStateId.SkippingPrefix,
prefix: commonPrefix,
});
}
return FILTER_ACCEPT;
}
keyHandler_SkippingPrefix(key: string, value: string): FilterReturnValue {
const { prefix } = <DelimiterFilterState_SkippingPrefix> this.state;
if (key.startsWith(prefix)) {
return FILTER_SKIP;
}
this.setState(<DelimiterFilterState_NotSkipping> {
id: DelimiterFilterStateId.NotSkipping,
});
return this.handleKey(key, value);
}
skippingBase(): string | undefined {
switch (this.state.id) {
case DelimiterFilterStateId.SkippingPrefix:
const { prefix } = <DelimiterFilterState_SkippingPrefix> this.state;
return inc(prefix);
default:
return SKIP_NONE;
}
}
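// Illustration (not part of the original file): while in the
// SkippingPrefix state with prefix 'img/', skippingV0() returns
// inc('img/') === 'img0', so the listing can jump past every key under
// 'img/' at once; skippingV1() returns the same target prefixed with
// DbPrefixes.Master.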
skippingV0() {
return this.skippingBase();
}
skippingV1() {
const skipTo = this.skippingBase();
if (skipTo === SKIP_NONE) {
return SKIP_NONE;
}
return DbPrefixes.Master + skipTo;
}
/**
* Return an object containing all mandatory fields to use once the
iteration is done; the NextMarker field is omitted if the output
* isn't truncated
* @return {Object} - following amazon format
*/
result(): ResultObject {
/* NextMarker is only provided when delimiter is used.
* specified in v1 listing documentation
* http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
*/
const result: ResultObject = {
CommonPrefixes: this.CommonPrefixes,
Contents: this.Contents,
IsTruncated: this.IsTruncated,
Delimiter: this.delimiter,
};
if (this.parameters.v2) {
result.NextContinuationToken = this.IsTruncated
? this.nextMarker : undefined;
} else {
result.NextMarker = (this.IsTruncated && this.delimiter)
? this.nextMarker : undefined;
}
return result;
}
}

View File

@ -1,5 +1,4 @@
-const { DelimiterMaster } = require('./delimiterMaster');
-const { FILTER_ACCEPT, FILTER_END } = require('./tools');
+const { Delimiter } = require('./delimiter');

 type ResultObject = {
     Contents: {
@ -11,16 +10,14 @@ type ResultObject = {
 };

 /**
- * Handle object listing with parameters. This extends the base class DelimiterMaster
+ * Handle object listing with parameters. This extends the base class Delimiter
  * to return the master/current versions.
  */
-class DelimiterCurrent extends DelimiterMaster {
+class DelimiterCurrent extends Delimiter {
     /**
      * Delimiter listing of current versions.
      * @param {Object} parameters - listing parameters
      * @param {String} parameters.beforeDate - limit the response to keys older than beforeDate
-     * @param {String} parameters.excludedDataStoreName - excluded datastore name
-     * @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
      * @param {RequestLogger} logger - The logger of the request
      * @param {String} [vFormat] - versioning key format
      */
@ -28,97 +25,27 @@ class DelimiterCurrent extends DelimiterMaster {
         super(parameters, logger, vFormat);

         this.beforeDate = parameters.beforeDate;
-        this.excludedDataStoreName = parameters.excludedDataStoreName;
-        this.maxScannedLifecycleListingEntries = parameters.maxScannedLifecycleListingEntries;
-        this.scannedKeys = 0;
     }

-    genMDParamsV0() {
-        const params = super.genMDParamsV0();
-        // lastModified and dataStoreName parameters are used by metadata
-        // backends that enable built-in filtering, a feature currently
-        // exclusive to MongoDB
+    genMDParamsV1() {
+        const params = super.genMDParamsV1();
         if (this.beforeDate) {
             params.lastModified = {
                 lt: this.beforeDate,
             };
         }
-        if (this.excludedDataStoreName) {
-            params.dataStoreName = {
-                ne: this.excludedDataStoreName,
-            }
-        }
         return params;
     }

-    /**
-     * Parses the stringified entry's value.
-     * @param s - stringified value
-     * @return - undefined if parsing fails, otherwise the parsed value
-     */
-    _parse(s) {
-        let p;
-        try {
-            p = JSON.parse(s);
-        } catch (e: any) {
-            this.logger.warn(
-                'Could not parse Object Metadata while listing',
-                { err: e.toString() });
-        }
-        return p;
-    }
-
-    /**
-     * check if the max keys count has been reached and set the
-     * final state of the result if it is the case
-     *
-     * specialized implementation on DelimiterCurrent to also check
-     * the number of scanned keys
-     *
-     * @return {Boolean} - indicates if the iteration has to stop
-     */
-    _reachedMaxKeys(): boolean {
-        if (this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries) {
-            this.IsTruncated = true;
-            this.logger.info('listing stopped due to reaching the maximum scanned entries limit',
-                {
-                    maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
-                    scannedKeys: this.scannedKeys,
-                });
-            return true;
-        }
-        return super._reachedMaxKeys();
-    }
-
-    addContents(key, value) {
-        ++this.scannedKeys;
-        const parsedValue = this._parse(value);
-        // if parsing fails, skip the key.
-        if (parsedValue) {
-            const lastModified = parsedValue['last-modified'];
-            const dataStoreName = parsedValue.dataStoreName;
-            // Check that the current version is older than the "beforeDate"
-            // and that "excludedDataStoreName" is either not specified or
-            // different from the version's data store name.
-            if ((!this.beforeDate || (lastModified && lastModified < this.beforeDate)) &&
-                (!this.excludedDataStoreName || dataStoreName !== this.excludedDataStoreName)) {
-                super.addContents(key, value);
-            }
-            // In the event of a timeout occurring before any content is added,
-            // NextMarker is updated even if the object is not eligible.
-            // It minimizes the amount of data that the client needs to re-process if the request times out.
-            this.nextMarker = key;
-        }
-    }
-
-    result(): object {
+    result(): ResultObject {
         const result: ResultObject = {
             Contents: this.Contents,
             IsTruncated: this.IsTruncated,
         };

         if (this.IsTruncated) {
-            result.NextMarker = this.nextMarker;
+            result.NextMarker = this.NextMarker;
         }

         return result;

View File

@ -0,0 +1,196 @@
'use strict'; // eslint-disable-line strict
const Delimiter = require('./delimiter').Delimiter;
const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { BucketVersioningKeyFormat } = VSConst;
const { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools');
const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes } = VSConst;
/**
* Handle object listing with parameters. This extends the base class Delimiter
* to return the raw master versions of existing objects.
*/
class DelimiterMaster extends Delimiter {
/**
* Delimiter listing of master versions.
* @param {Object} parameters - listing parameters
* @param {String} parameters.delimiter - delimiter per amazon format
* @param {String} parameters.prefix - prefix per amazon format
* @param {String} parameters.marker - marker per amazon format
* @param {Number} parameters.maxKeys - number of keys to list
* @param {Boolean} parameters.v2 - indicates whether the v2 listing format is used
* @param {String} parameters.startAfter - marker per amazon v2 format
* @param {String} parameters.continuationToken - obfuscated amazon token
* @param {RequestLogger} logger - The logger of the request
* @param {String} [vFormat] - versioning key format
*/
constructor(parameters, logger, vFormat) {
super(parameters, logger, vFormat);
// non-PHD master version or a version whose master is a PHD version
this.prvKey = undefined;
this.prvPHDKey = undefined;
this.inReplayPrefix = false;
Object.assign(this, {
[BucketVersioningKeyFormat.v0]: {
filter: this.filterV0,
skipping: this.skippingV0,
},
[BucketVersioningKeyFormat.v1]: {
filter: this.filterV1,
skipping: this.skippingV1,
},
}[this.vFormat]);
}
/**
* Filter to apply on each iteration for buckets in v0 format,
* based on:
* - prefix
* - delimiter
* - maxKeys
* The marker is being handled directly by levelDB
* @param {Object} obj - The key and value of the element
* @param {String} obj.key - The key of the element
* @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue
*/
filterV0(obj) {
let key = obj.key;
const value = obj.value;
if (key.startsWith(DbPrefixes.Replay)) {
this.inReplayPrefix = true;
return FILTER_SKIP;
}
this.inReplayPrefix = false;
/* Skip keys not starting with the prefix or not alphabetically
* ordered. */
if ((this.prefix && !key.startsWith(this.prefix))
|| (typeof this[this.nextContinueMarker] === 'string' &&
key <= this[this.nextContinueMarker])) {
return FILTER_SKIP;
}
/* Skip version keys (<key><versionIdSeparator><version>) if we already
* have a master version. */
const versionIdIndex = key.indexOf(VID_SEP);
if (versionIdIndex >= 0) {
key = key.slice(0, versionIdIndex);
/* - key === this.prvKey is triggered when a master version has
* been accepted for this key,
* - key === this.NextMarker or this.NextContinuationToken is triggered
* when a listing page ends on an accepted obj and the next page
* starts with a version of this object.
* In that case prvKey is set by default to undefined
* in the constructor and comparing to NextMarker is the only
* way to know we should not accept this version. This test is
* not redundant with the one at the beginning of this function:
* here we are comparing the key without the version suffix,
* - key startsWith the previous NextMarker happens because we set
* NextMarker to the common prefix instead of the whole key
* value. (TODO: remove this test once ZENKO-1048 is fixed)
* */
if (key === this.prvKey || key === this[this.nextContinueMarker] ||
(this.delimiter &&
key.startsWith(this[this.nextContinueMarker]))) {
/* master version already filtered */
return FILTER_SKIP;
}
}
if (Version.isPHD(value)) {
/* master version is a PHD version, we want to wait for the next
* one:
* - Set the prvKey to undefined to not skip the next version,
* - return accept so that the next values in range are not skipped
* (skip scan mechanism in metadata backends like Metadata or
* MongoClient). */
this.prvKey = undefined;
this.prvPHDKey = key;
return FILTER_ACCEPT;
}
if (Version.isDeleteMarker(value)) {
/* This entry is a deleteMarker which has not been filtered by the
* version test. Either :
* - it is a deleteMarker on the master version, we want to SKIP
* all the following entries with this key (no master version),
* - or a deleteMarker following a PHD (setting prvKey to undefined
* when an entry is a PHD avoids the skip on version for the
* next entry). In that case we expect the master version to
* follow. */
if (key === this.prvPHDKey) {
this.prvKey = undefined;
return FILTER_ACCEPT;
}
this.prvKey = key;
return FILTER_SKIP;
}
this.prvKey = key;
if (this.delimiter) {
// check if the key has the delimiter
const baseIndex = this.prefix ? this.prefix.length : 0;
const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
if (delimiterIndex >= 0) {
// try to add the prefix to the list
return this.addCommonPrefix(key, delimiterIndex);
}
}
return this.addContents(key, value);
}
/**
* Filter to apply on each iteration for buckets in v1 format,
* based on:
* - prefix
* - delimiter
* - maxKeys
* The marker is being handled directly by levelDB
* @param {Object} obj - The key and value of the element
* @param {String} obj.key - The key of the element
* @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue
*/
filterV1(obj) {
// Filtering master keys in v1 is simply listing the master
// keys, as the state of version keys does not change the
// result, so we can use the Delimiter method directly.
return super.filter(obj);
}
skippingBase() {
if (this[this.nextContinueMarker]) {
// next marker or next continuation token:
// - foo/ : skipping foo/
// - foo : skipping foo.
const index = this[this.nextContinueMarker].
lastIndexOf(this.delimiter);
if (index === this[this.nextContinueMarker].length - 1) {
return this[this.nextContinueMarker];
}
return this[this.nextContinueMarker] + VID_SEP;
}
return SKIP_NONE;
}
skippingV0() {
if (this.inReplayPrefix) {
return DbPrefixes.Replay;
}
return this.skippingBase();
}
skippingV1() {
const skipTo = this.skippingBase();
if (skipTo === SKIP_NONE) {
return SKIP_NONE;
}
return DbPrefixes.Master + skipTo;
}
}
module.exports = { DelimiterMaster };
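To make the v0 filtering flow above concrete, here is a short trace of filterV0() (an illustration; VID_SEP is shown as '\0' and values are abbreviated):

// incoming entry                    -> outcome
// 'a'     (master, regular)         -> accepted via addContents, prvKey = 'a'
// 'a\0v1' (version)                 -> skipped: truncated key equals prvKey
// 'b'     (master, delete marker)   -> skipped, prvKey = 'b': no current version
// 'b\0v2' (version)                 -> skipped: truncated key equals prvKey
// 'c'     (master, regular)         -> accepted via addContents, prvKey = 'c'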

View File

@ -1,620 +0,0 @@
import {
Delimiter,
FilterState,
FilterReturnValue,
DelimiterFilterStateId,
DelimiterFilterState_NotSkipping,
DelimiterFilterState_SkippingPrefix,
ResultObject,
} from './delimiter';
const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { BucketVersioningKeyFormat } = VSConst;
const { FILTER_ACCEPT, FILTER_SKIP, FILTER_END, SKIP_NONE, inc } = require('./tools');
import { GapSetEntry } from '../cache/GapSet';
import { GapCacheInterface } from '../cache/GapCache';
const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes } = VSConst;
export const enum DelimiterMasterFilterStateId {
SkippingVersionsV0 = 101,
WaitVersionAfterPHDV0 = 102,
SkippingGapV0 = 103,
};
interface DelimiterMasterFilterState_SkippingVersionsV0 extends FilterState {
id: DelimiterMasterFilterStateId.SkippingVersionsV0,
masterKey: string,
};
interface DelimiterMasterFilterState_WaitVersionAfterPHDV0 extends FilterState {
id: DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
masterKey: string,
};
interface DelimiterMasterFilterState_SkippingGapV0 extends FilterState {
id: DelimiterMasterFilterStateId.SkippingGapV0,
};
export const enum GapCachingState {
NoGapCache = 0, // there is no gap cache
UnknownGap = 1, // waiting for a cache lookup
GapLookupInProgress = 2, // asynchronous gap lookup in progress
GapCached = 3, // an upcoming or already skippable gap is cached
NoMoreGap = 4, // the cache doesn't have any more gaps inside the listed range
};
type GapCachingInfo_NoGapCache = {
state: GapCachingState.NoGapCache;
};
type GapCachingInfo_NoCachedGap = {
state: GapCachingState.UnknownGap
| GapCachingState.GapLookupInProgress
gapCache: GapCacheInterface;
};
type GapCachingInfo_GapCached = {
state: GapCachingState.GapCached;
gapCache: GapCacheInterface;
gapCached: GapSetEntry;
};
type GapCachingInfo_NoMoreGap = {
state: GapCachingState.NoMoreGap;
};
type GapCachingInfo = GapCachingInfo_NoGapCache
| GapCachingInfo_NoCachedGap
| GapCachingInfo_GapCached
| GapCachingInfo_NoMoreGap;
export const enum GapBuildingState {
Disabled = 0, // no gap cache or no gap building needed (e.g. in V1 versioning format)
NotBuilding = 1, // not currently building a gap (i.e. not listing within a gap)
Building = 2, // currently building a gap (i.e. listing within a gap)
Expired = 3, // not allowed to build due to exposure delay timeout
};
type GapBuildingInfo_NothingToBuild = {
state: GapBuildingState.Disabled | GapBuildingState.Expired;
};
type GapBuildingParams = {
/**
* minimum weight for a gap to be created in the cache
*/
minGapWeight: number;
/**
* trigger a cache setGap() call every N skippable keys
*/
triggerSaveGapWeight: number;
/**
* timestamp to assess whether we're still inside the validity period to
* be allowed to build gaps
*/
initTimestamp: number;
};
type GapBuildingInfo_NotBuilding = {
state: GapBuildingState.NotBuilding;
gapCache: GapCacheInterface;
params: GapBuildingParams;
};
type GapBuildingInfo_Building = {
state: GapBuildingState.Building;
gapCache: GapCacheInterface;
params: GapBuildingParams;
/**
* Gap currently being created
*/
gap: GapSetEntry;
/**
* total current weight of the gap being created
*/
gapWeight: number;
};
type GapBuildingInfo = GapBuildingInfo_NothingToBuild
| GapBuildingInfo_NotBuilding
| GapBuildingInfo_Building;
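// Summary of the intended transitions (inferred from the handlers below,
// not part of the original file):
//   caching:  NoGapCache -> UnknownGap -> GapLookupInProgress -> GapCached
//             -> UnknownGap (once past the cached gap) -> ... -> NoMoreGap
//   building: Disabled/NotBuilding -> Building -> NotBuilding -> ...
//             -> Expired (until refreshGapCache() is called again)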
/**
* Handle object listing with parameters. This extends the base class Delimiter
* to return the raw master versions of existing objects.
*/
export class DelimiterMaster extends Delimiter {
_gapCaching: GapCachingInfo;
_gapBuilding: GapBuildingInfo;
_refreshedBuildingParams: GapBuildingParams | null;
/**
* Delimiter listing of master versions.
* @param {Object} parameters - listing parameters
* @param {String} [parameters.delimiter] - delimiter per amazon format
* @param {String} [parameters.prefix] - prefix per amazon format
* @param {String} [parameters.marker] - marker per amazon format
* @param {Number} [parameters.maxKeys] - number of keys to list
* @param {Boolean} [parameters.v2] - indicates whether the v2 listing format is used
* @param {String} [parameters.startAfter] - marker per amazon v2 format
* @param {String} [parameters.continuationToken] - obfuscated amazon token
* @param {RequestLogger} logger - The logger of the request
* @param {String} [vFormat="v0"] - versioning key format
*/
constructor(parameters, logger, vFormat?: string) {
super(parameters, logger, vFormat);
if (this.vFormat === BucketVersioningKeyFormat.v0) {
// override Delimiter's implementation of NotSkipping for
// DelimiterMaster logic (skipping versions and special
// handling of delete markers and PHDs)
this.setKeyHandler(
DelimiterFilterStateId.NotSkipping,
this.keyHandler_NotSkippingPrefixNorVersionsV0.bind(this));
// add extra state handlers specific to DelimiterMaster with v0 format
this.setKeyHandler(
DelimiterMasterFilterStateId.SkippingVersionsV0,
this.keyHandler_SkippingVersionsV0.bind(this));
this.setKeyHandler(
DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
this.keyHandler_WaitVersionAfterPHDV0.bind(this));
this.setKeyHandler(
DelimiterMasterFilterStateId.SkippingGapV0,
this.keyHandler_SkippingGapV0.bind(this));
if (this.marker) {
// a distinct initial state embeds special logic that applies only
// until the first master key is found, so it does not have to be
// checked afterwards
this.state = <DelimiterMasterFilterState_SkippingVersionsV0> {
id: DelimiterMasterFilterStateId.SkippingVersionsV0,
masterKey: this.marker,
};
} else {
this.state = <DelimiterFilterState_NotSkipping> {
id: DelimiterFilterStateId.NotSkipping,
};
}
} else {
// save base implementation of the `NotSkipping` state in
// Delimiter before overriding it with ours, to be able to call it from there
this.keyHandler_NotSkipping_Delimiter = this.keyHandlers[DelimiterFilterStateId.NotSkipping];
this.setKeyHandler(
DelimiterFilterStateId.NotSkipping,
this.keyHandler_NotSkippingPrefixNorVersionsV1.bind(this));
}
// in v1, we can directly use Delimiter's implementation,
// which is already set to the proper state
// default initialization of the gap cache and building states, can be
// set by refreshGapCache()
this._gapCaching = {
state: GapCachingState.NoGapCache,
};
this._gapBuilding = {
state: GapBuildingState.Disabled,
};
this._refreshedBuildingParams = null;
}
/**
* Get the validity period left before a refresh of the gap cache is needed
* to continue building new gaps.
*
* @return {number|null} one of:
* - the remaining time in milliseconds in which gaps can be added to the
* cache before a call to refreshGapCache() is required
* - or 0 if there is no time left and a call to refreshGapCache() is required
* to resume caching gaps
* - or null if refreshing the cache is never needed (because the gap cache
* is either not available or not used)
*/
getGapBuildingValidityPeriodMs(): number | null {
let gapBuilding;
switch (this._gapBuilding.state) {
case GapBuildingState.Disabled:
return null;
case GapBuildingState.Expired:
return 0;
case GapBuildingState.NotBuilding:
gapBuilding = <GapBuildingInfo_NotBuilding> this._gapBuilding;
break;
case GapBuildingState.Building:
gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
break;
}
const { gapCache, params } = gapBuilding;
const elapsedTime = Date.now() - params.initTimestamp;
return Math.max(gapCache.exposureDelayMs - elapsedTime, 0);
}
/**
* Refresh the gaps caching logic (gaps are series of current delete markers
* in V0 bucket metadata format). It has two effects:
*
* - starts exposing existing and future gaps from the cache to efficiently
* skip over series of current delete markers that have been seen and cached
* earlier
*
* - enables building and caching new gaps (or extend existing ones), for a
* limited time period defined by the `gapCacheProxy.exposureDelayMs` value
* in milliseconds. To refresh the validity period and resume building and
* caching new gaps, one must restart a new listing from the database (starting
* at the current listing key, included), then call refreshGapCache() again.
*
* @param {GapCacheInterface} gapCacheProxy - API proxy to the gaps cache
* (the proxy should handle prefixing object keys with the bucket name)
* @param {number} [minGapWeight=100] - minimum weight of a gap for it to be
* added in the cache
* @param {number} [triggerSaveGapWeight] - cumulative weight to wait for
* before saving the current building gap. Cannot be greater than
* `gapCacheProxy.maxGapWeight` (the value is thresholded to `maxGapWeight`
* otherwise). Defaults to `gapCacheProxy.maxGapWeight / 2`.
* @return {undefined}
*/
refreshGapCache(
gapCacheProxy: GapCacheInterface,
minGapWeight?: number,
triggerSaveGapWeight?: number
): void {
if (this.vFormat !== BucketVersioningKeyFormat.v0) {
return;
}
if (this._gapCaching.state === GapCachingState.NoGapCache) {
this._gapCaching = {
state: GapCachingState.UnknownGap,
gapCache: gapCacheProxy,
};
}
const refreshedBuildingParams: GapBuildingParams = {
minGapWeight: minGapWeight || 100,
triggerSaveGapWeight: triggerSaveGapWeight
|| Math.trunc(gapCacheProxy.maxGapWeight / 2),
initTimestamp: Date.now(),
};
if (this._gapBuilding.state === GapBuildingState.Building) {
// refreshed params will be applied as soon as the current building gap is saved
this._refreshedBuildingParams = refreshedBuildingParams;
} else {
this._gapBuilding = {
state: GapBuildingState.NotBuilding,
gapCache: gapCacheProxy,
params: refreshedBuildingParams,
};
}
}
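// Sketch of the expected calling pattern (an illustration; 'gapCacheProxy'
// stands for any object implementing GapCacheInterface):
//
//     const listing = new DelimiterMaster(params, logger, 'v0');
//     listing.refreshGapCache(gapCacheProxy, 100);
//     // ... feed entries through listing.filter() ...
//     if (listing.getGapBuildingValidityPeriodMs() === 0) {
//         // restart the listing from the current key, then:
//         listing.refreshGapCache(gapCacheProxy, 100);
//     }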
/**
* Trigger a lookup of the closest upcoming or already skippable gap.
*
* @param {string} fromKey - lookup a gap not before 'fromKey'
* @return {undefined} - the lookup is asynchronous and its
* response is handled inside this function
*/
_triggerGapLookup(gapCaching: GapCachingInfo_NoCachedGap, fromKey: string): void {
this._gapCaching = {
state: GapCachingState.GapLookupInProgress,
gapCache: gapCaching.gapCache,
};
const maxKey = this.prefix ? inc(this.prefix) : undefined;
gapCaching.gapCache.lookupGap(fromKey, maxKey).then(_gap => {
const gap = <GapSetEntry | null> _gap;
if (gap) {
this._gapCaching = {
state: GapCachingState.GapCached,
gapCache: gapCaching.gapCache,
gapCached: gap,
};
} else {
this._gapCaching = {
state: GapCachingState.NoMoreGap,
};
}
});
}
_checkGapOnMasterDeleteMarker(key: string): FilterReturnValue {
switch (this._gapBuilding.state) {
case GapBuildingState.Disabled:
case GapBuildingState.Expired:
break;
case GapBuildingState.NotBuilding:
this._createBuildingGap(key, 1);
break;
case GapBuildingState.Building:
this._updateBuildingGap(key);
break;
}
if (this._gapCaching.state === GapCachingState.GapCached) {
const { gapCached } = this._gapCaching;
if (key >= gapCached.firstKey) {
if (key <= gapCached.lastKey) {
// we are inside the last looked up cached gap: transition to
// 'SkippingGapV0' state
this.setState(<DelimiterMasterFilterState_SkippingGapV0> {
id: DelimiterMasterFilterStateId.SkippingGapV0,
});
// cut the current gap before skipping, it will be merged or
// chained with the existing one (depending on its weight)
if (this._gapBuilding.state === GapBuildingState.Building) {
// substract 1 from the weight because we are going to chain this gap,
// which has an overlap of one key.
this._gapBuilding.gap.weight -= 1;
this._cutBuildingGap();
}
return FILTER_SKIP;
}
// as we are past the cached gap, we will need another lookup
this._gapCaching = {
state: GapCachingState.UnknownGap,
gapCache: this._gapCaching.gapCache,
};
}
}
if (this._gapCaching.state === GapCachingState.UnknownGap) {
this._triggerGapLookup(this._gapCaching, key);
}
return FILTER_ACCEPT;
}
filter_onNewMasterKeyV0(key: string, value: string): FilterReturnValue {
// if this master key is a delete marker, accept it without
// adding the version to the contents
if (Version.isDeleteMarker(value)) {
// update the state to start skipping versions of the new master key
this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
id: DelimiterMasterFilterStateId.SkippingVersionsV0,
masterKey: key,
});
return this._checkGapOnMasterDeleteMarker(key);
}
if (Version.isPHD(value)) {
// master version is a PHD version: wait for the first
// following version that will be considered as the actual
// master key
this.setState(<DelimiterMasterFilterState_WaitVersionAfterPHDV0> {
id: DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
masterKey: key,
});
return FILTER_ACCEPT;
}
// cut the current gap as soon as a non-deleted entry is seen
this._cutBuildingGap();
if (key.startsWith(DbPrefixes.Replay)) {
// skip internal replay prefix entirely
this.setState(<DelimiterFilterState_SkippingPrefix> {
id: DelimiterFilterStateId.SkippingPrefix,
prefix: DbPrefixes.Replay,
});
return FILTER_SKIP;
}
if (this._reachedMaxKeys()) {
return FILTER_END;
}
const commonPrefix = this.addCommonPrefixOrContents(key, value);
if (commonPrefix) {
// transition into SkippingPrefix state to skip all following keys
// while they start with the same prefix
this.setState(<DelimiterFilterState_SkippingPrefix> {
id: DelimiterFilterStateId.SkippingPrefix,
prefix: commonPrefix,
});
return FILTER_ACCEPT;
}
// update the state to start skipping versions of the new master key
this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
id: DelimiterMasterFilterStateId.SkippingVersionsV0,
masterKey: key,
});
return FILTER_ACCEPT;
}
keyHandler_NotSkippingPrefixNorVersionsV0(key: string, value: string): FilterReturnValue {
return this.filter_onNewMasterKeyV0(key, value);
}
filter_onNewMasterKeyV1(key: string, value: string): FilterReturnValue {
// if this master key is a delete marker, accept it without
// adding the version to the contents
if (Version.isDeleteMarker(value)) {
return FILTER_ACCEPT;
}
// use base Delimiter's implementation
return this.keyHandler_NotSkipping_Delimiter(key, value);
}
keyHandler_NotSkippingPrefixNorVersionsV1(key: string, value: string): FilterReturnValue {
return this.filter_onNewMasterKeyV1(key, value);
}
keyHandler_SkippingVersionsV0(key: string, value: string): FilterReturnValue {
/* In the SkippingVersionsV0 state, skip all version keys
* (<key><versionIdSeparator><version>) */
const versionIdIndex = key.indexOf(VID_SEP);
if (versionIdIndex !== -1) {
// version keys count in the building gap weight because they must
// also be listed until skipped
if (this._gapBuilding.state === GapBuildingState.Building) {
this._updateBuildingGap(key);
}
return FILTER_SKIP;
}
return this.filter_onNewMasterKeyV0(key, value);
}
keyHandler_WaitVersionAfterPHDV0(key: string, value: string): FilterReturnValue {
// After a PHD key is encountered, the next version key of the
// same object if it exists is the new master key, hence
// consider it as such and call 'onNewMasterKeyV0' (the test
// 'masterKey == phdKey' is probably redundant when we already
// know we have a versioned key, since all objects in v0 have
// a master key, but it is kept to be safe)
const { masterKey: phdKey } = <DelimiterMasterFilterState_WaitVersionAfterPHDV0> this.state;
const versionIdIndex = key.indexOf(VID_SEP);
if (versionIdIndex !== -1) {
const masterKey = key.slice(0, versionIdIndex);
if (masterKey === phdKey) {
return this.filter_onNewMasterKeyV0(masterKey, value);
}
}
return this.filter_onNewMasterKeyV0(key, value);
}
keyHandler_SkippingGapV0(key: string, value: string): FilterReturnValue {
const { gapCache, gapCached } = <GapCachingInfo_GapCached> this._gapCaching;
if (key <= gapCached.lastKey) {
return FILTER_SKIP;
}
this._gapCaching = {
state: GapCachingState.UnknownGap,
gapCache,
};
this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
id: DelimiterMasterFilterStateId.SkippingVersionsV0,
});
// Start a gap with weight=0 from the latest skippable key. This will
// allow extending the gap just skipped with a chained gap in case
// other delete markers are seen after the existing gap is skipped.
this._createBuildingGap(gapCached.lastKey, 0, gapCached.weight);
return this.handleKey(key, value);
}
skippingBase(): string | undefined {
switch (this.state.id) {
case DelimiterMasterFilterStateId.SkippingVersionsV0:
const { masterKey } = <DelimiterMasterFilterState_SkippingVersionsV0> this.state;
return masterKey + inc(VID_SEP);
case DelimiterMasterFilterStateId.SkippingGapV0:
const { gapCached } = <GapCachingInfo_GapCached> this._gapCaching;
return gapCached.lastKey;
default:
return super.skippingBase();
}
}
result(): ResultObject {
this._cutBuildingGap();
return super.result();
}
_checkRefreshedBuildingParams(params: GapBuildingParams): GapBuildingParams {
if (this._refreshedBuildingParams) {
const newParams = this._refreshedBuildingParams;
this._refreshedBuildingParams = null;
return newParams;
}
return params;
}
/**
* Save the gap being built if allowed (i.e. still within the
* allocated exposure time window).
*
* @return {boolean} - true if the gap was saved, false if we are
* outside the allocated exposure time window.
*/
_saveBuildingGap(): boolean {
const { gapCache, params, gap, gapWeight } =
<GapBuildingInfo_Building> this._gapBuilding;
const totalElapsed = Date.now() - params.initTimestamp;
if (totalElapsed >= gapCache.exposureDelayMs) {
this._gapBuilding = {
state: GapBuildingState.Expired,
};
this._refreshedBuildingParams = null;
return false;
}
const { firstKey, lastKey, weight } = gap;
gapCache.setGap(firstKey, lastKey, weight);
this._gapBuilding = {
state: GapBuildingState.Building,
gapCache,
params: this._checkRefreshedBuildingParams(params),
gap: {
firstKey: gap.lastKey,
lastKey: gap.lastKey,
weight: 0,
},
gapWeight,
};
return true;
}
/**
* Create a new gap to be extended afterwards
*
* @param {string} newKey - gap's first key
* @param {number} startWeight - initial weight of the building gap (usually 0 or 1)
* @param {number} [cachedWeight] - if continuing a cached gap, weight of the existing
* cached portion
* @return {undefined}
*/
_createBuildingGap(newKey: string, startWeight: number, cachedWeight?: number): void {
if (this._gapBuilding.state === GapBuildingState.NotBuilding) {
const { gapCache, params } = <GapBuildingInfo_NotBuilding> this._gapBuilding;
this._gapBuilding = {
state: GapBuildingState.Building,
gapCache,
params: this._checkRefreshedBuildingParams(params),
gap: {
firstKey: newKey,
lastKey: newKey,
weight: startWeight,
},
gapWeight: (cachedWeight || 0) + startWeight,
};
}
}
_updateBuildingGap(newKey: string): void {
const gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
const { params, gap } = gapBuilding;
gap.lastKey = newKey;
gap.weight += 1;
gapBuilding.gapWeight += 1;
// the GapCache API requires updating a gap regularly because it can only split
// it once per update, at the known last key. In practice the default behavior
// is to trigger an update after a number of keys that is half the maximum weight.
// It is also useful for other listings to benefit from the cache sooner.
if (gapBuilding.gapWeight >= params.minGapWeight &&
gap.weight >= params.triggerSaveGapWeight) {
this._saveBuildingGap();
}
}
_cutBuildingGap(): void {
if (this._gapBuilding.state === GapBuildingState.Building) {
let gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
let { gapCache, params, gap, gapWeight } = gapBuilding;
// only set gaps that are significant enough in weight and
// with a non-empty extension
if (gapWeight >= params.minGapWeight && gap.weight > 0) {
// we're done if we were not allowed to save the gap
if (!this._saveBuildingGap()) {
return;
}
// params may have been refreshed, reload them
gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
params = gapBuilding.params;
}
this._gapBuilding = {
state: GapBuildingState.NotBuilding,
gapCache,
params,
};
}
}
}
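The thresholds used by _updateBuildingGap() and _saveBuildingGap() above interact: a building gap only becomes eligible for saving once the cumulated weight since the listing start reaches minGapWeight, and it is then re-saved every triggerSaveGapWeight keys so the cache can merge or chain it incrementally. A stand-alone sketch of just that thresholding logic, with toy keys and hypothetical parameter values (no GapCache or listing state machine involved):

const params = { minGapWeight: 4, triggerSaveGapWeight: 2 };
const savedGaps = [];
let gap = null;       // gap under construction
let gapWeight = 0;    // cumulated weight, including already-saved portions

function onSkippableKey(key) {
    if (!gap) {
        gap = { firstKey: key, lastKey: key, weight: 1 };
        gapWeight = 1;
        return;
    }
    gap.lastKey = key;
    gap.weight += 1;
    gapWeight += 1;
    if (gapWeight >= params.minGapWeight && gap.weight >= params.triggerSaveGapWeight) {
        savedGaps.push({ ...gap });
        // restart from the saved gap's last key, like _saveBuildingGap() does
        gap = { firstKey: gap.lastKey, lastKey: gap.lastKey, weight: 0 };
    }
}

['k1', 'k2', 'k3', 'k4', 'k5', 'k6'].forEach(onSkippableKey);
console.log(savedGaps);
// [ { firstKey: 'k1', lastKey: 'k4', weight: 4 },
//   { firstKey: 'k4', lastKey: 'k6', weight: 2 } ]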


@ -1,37 +1,82 @@
-const { DelimiterVersions } = require('./delimiterVersions');
-const { FILTER_END, FILTER_SKIP } = require('./tools');
+'use strict'; // eslint-disable-line strict
+
+const Delimiter = require('./delimiter').Delimiter;
+const VSConst = require('../../versioning/constants').VersioningConstants;
+const { inc, FILTER_ACCEPT, FILTER_END, SKIP_NONE } = require('./tools');
+
+const VID_SEP = VSConst.VersionId.Separator;
+const { DbPrefixes } = VSConst;
+
+// TODO: find an acceptable timeout value.
+const DELIMITER_TIMEOUT_MS = 10 * 1000; // 10s
 const TRIM_METADATA_MIN_BLOB_SIZE = 10000;

 /**
- * Handle object listing with parameters. This extends the base class DelimiterVersions
+ * Handle object listing with parameters. This extends the base class Delimiter
  * to return the raw non-current versions objects.
  */
-class DelimiterNonCurrent extends DelimiterVersions {
+class DelimiterNonCurrent extends Delimiter {
     /**
      * Delimiter listing of non-current versions.
      * @param {Object} parameters - listing parameters
-     * @param {String} parameters.keyMarker - key marker
      * @param {String} parameters.versionIdMarker - version id marker
      * @param {String} parameters.beforeDate - limit the response to keys with stale date older than beforeDate.
      * stale date is the date when a version becomes non-current.
-     * @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
-     * @param {String} parameters.excludedDataStoreName - exclude dataStoreName matches from the versions
+     * @param {String} parameters.keyMarker - key marker
      * @param {RequestLogger} logger - The logger of the request
      * @param {String} [vFormat] - versioning key format
      */
     constructor(parameters, logger, vFormat) {
         super(parameters, logger, vFormat);
+        this.versionIdMarker = parameters.versionIdMarker;
         this.beforeDate = parameters.beforeDate;
-        this.excludedDataStoreName = parameters.excludedDataStoreName;
-        this.maxScannedLifecycleListingEntries = parameters.maxScannedLifecycleListingEntries;
-        // internal state
-        this.prevKey = null;
+        this.keyMarker = parameters.keyMarker;
+        this.NextKeyMarker = null;
+        this.skipping = this.skippingV1;
+        this.genMDParams = this.genMDParamsV1;
+        this.keyName = null;
         this.staleDate = null;
-        this.scannedKeys = 0;
+        // used for monitoring
+        this.evaluatedKeys = 0;
     }

+    skippingV1() {
+        return SKIP_NONE;
+    }
+
+    genMDParamsV1() {
+        const params = {
+            gte: DbPrefixes.Version,
+            lt: inc(DbPrefixes.Version),
+        };
+        if (this.prefix) {
+            params.gte = `${DbPrefixes.Version}${this.prefix}`;
+            params.lt = `${DbPrefixes.Version}${inc(this.prefix)}`;
+        }
+        if (this.keyMarker && `${DbPrefixes.Version}${this.keyMarker}` >= params.gte) {
+            if (this.versionIdMarker) {
+                // versionIdMarker should always come with keyMarker but may not be the other way around.
+                // NOTE: "gte" (instead of "gt") is used to include the last version of the "previous"
+                // truncated listing when a versionId marker is specified.
+                // This "previous"/"already evaluated" version will be used to retrieve the stale date and
+                // skipped to not evaluate the same key twice in the addContents() method.
+                params.gte = DbPrefixes.Version
+                    + this.keyMarker
+                    + VID_SEP
+                    + this.versionIdMarker;
+            } else {
+                delete params.gte;
+                params.gt = DbPrefixes.Version + inc(this.keyMarker + VID_SEP);
+            }
+        }
+        this.start = Date.now();
+        return params;
+    }
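// Illustration of the ranges genMDParamsV1() computes above, assuming the
// usual constant values (DbPrefixes.Version = '\x7fV', VID_SEP = '\x00') and
// inc() bumping the last character of its argument:
//
//   prefix 'photos/'               -> { gte: '\x7fVphotos/', lt: '\x7fVphotos0' }
//   keyMarker 'a/b' alone          -> { gt: '\x7fV' + inc('a/b\x00'), ... },
//                                     i.e. start strictly after every version of 'a/b'
//   keyMarker plus versionIdMarker -> { gte: '\x7fVa/b\x00' + versionIdMarker, ... },
//                                     re-reading the last evaluated version so its
//                                     last-modified date can seed the stale date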
     getLastModified(value) {
@ -49,91 +94,72 @@ class DelimiterNonCurrent extends DelimiterVersions {
         return lastModified;
     }

-    // Overwrite keyHandler_SkippingVersions to include the last version from the previous listing.
-    // The creation (last-modified) date of this version will be the stale date for the following version.
-    // eslint-disable-next-line camelcase
-    keyHandler_SkippingVersions(key, versionId, value) {
-        if (key === this.keyMarker) {
-            // since the nonversioned key equals the marker, there is
-            // necessarily a versionId in this key
-            const _versionId = versionId;
-            if (_versionId < this.versionIdMarker) {
-                // skip all versions until marker
-                return FILTER_SKIP;
-            }
-        }
-        this.setState({
-            id: 1 /* NotSkipping */,
-        });
-        return this.handleKey(key, versionId, value);
-    }
-
-    filter(obj) {
-        if (this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries) {
-            this.IsTruncated = true;
-            this.logger.info('listing stopped due to reaching the maximum scanned entries limit',
-                {
-                    maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
-                    scannedKeys: this.scannedKeys,
-                });
-            return FILTER_END;
-        }
-        ++this.scannedKeys;
-        return super.filter(obj);
-    }
-
     /**
      * NOTE: Each version of a specific key is sorted from the latest to the oldest
      * thanks to the way version ids are generated.
-     * DESCRIPTION: Skip the version if it represents the master key, but keep its last-modified date in memory,
-     * which will be the stale date of the following version.
-     * The following version is pushed only:
-     * - if the "stale date" (picked up from the previous version) is available (JSON.parse has not failed),
-     * - if "beforeDate" is not specified or if specified and the "stale date" is older.
-     * - if "excludedDataStoreName" is not specified or if specified and the data store name is different
+     * DESCRIPTION: For a given key, the latest version is skipped since it represents the current version or
+     * the last version of the previous truncated listing.
+     * The current last-modified date is kept in memory and used as a "stale date" for the following version.
+     * The following version is pushed only if the "stale date" (picked up from the previous version)
+     * is available (JSON.parse has not failed), if the "beforeDate" argument is specified, and
+     * the "stale date" is older than the "beforeDate".
      * The in-memory "stale date" is then updated with the version's last-modified date to be used for
      * the following version.
      * The process stops and returns the available results if either:
      * - no more metadata key is left to be processed
      * - the listing reaches the maximum number of keys to be returned
      * - the internal timeout is reached
-     * @param {String} key - The key to add
-     * @param {String} versionId - The version id
+     * @param {String} keyVersionSuffix - The key to add
      * @param {String} value - The value of the key
-     * @return {undefined}
+     * @return {number} - indicates if iteration should continue
      */
-    addVersion(key, versionId, value) {
-        this.nextKeyMarker = key;
-        this.nextVersionIdMarker = versionId;
+    addContents(keyVersionSuffix, value) {
+        if (this._reachedMaxKeys()) {
+            return FILTER_END;
+        }

-        // Skip the version if it represents the non-current version, but keep its last-modified date,
-        // which will be the stale date of the following version.
-        const isCurrentVersion = key !== this.prevKey;
-        if (isCurrentVersion) {
-            this.staleDate = this.getLastModified(value);
-            this.prevKey = key;
-            return;
-        }
+        if (this.start && Date.now() - this.start > DELIMITER_TIMEOUT_MS) {
+            this.IsTruncated = true;
+            this.logger.info('listing stopped after expected internal timeout',
+                {
+                    timeoutMs: DELIMITER_TIMEOUT_MS,
+                    evaluatedKeys: this.evaluatedKeys,
+                });
+            return FILTER_END;
+        }
+        ++this.evaluatedKeys;

-        // The following version is pushed only:
-        // - if the "stale date" (picked up from the previous version) is available (JSON.parse has not failed),
-        // - if "beforeDate" is not specified or if specified and the "stale date" is older.
-        // - if "excludedDataStoreName" is not specified or if specified and the data store name is different
+        const versionIdIndex = keyVersionSuffix.indexOf(VID_SEP);
+        const key = keyVersionSuffix.slice(0, versionIdIndex);
+        const versionId = keyVersionSuffix.slice(versionIdIndex + 1);
+
+        this.NextKeyMarker = key;
+        this.NextVersionIdMarker = versionId;
+
+        // For a given key, the latest version is skipped since it represents either:
+        // - the current version or
+        // - the last version of the previous truncated listing
+        const isLatestVersion = key !== this.keyName;
+
+        if (isLatestVersion) {
+            this.keyName = key;
+            // The current last-modified date is kept in memory and used as a "stale date" for the following version.
+            this.staleDate = this.getLastModified(value);
+            return FILTER_ACCEPT;
+        }
+
+        // The following version is pushed only if the "stale date" (picked up from the previous version)
+        // is available (JSON.parse has not failed) and, if the "beforeDate" argument is specified,
+        // the "stale date" is older than the "beforeDate".
         let lastModified;
         if (this.staleDate && (!this.beforeDate || this.staleDate < this.beforeDate)) {
-            const parsedValue = this._parse(value);
-            // if parsing fails, skip the key.
-            if (parsedValue) {
-                const dataStoreName = parsedValue.dataStoreName;
-                lastModified = parsedValue['last-modified'];
-                if (!this.excludedDataStoreName || dataStoreName !== this.excludedDataStoreName) {
-                    const s = this._stringify(parsedValue, this.staleDate);
-                    // check that _stringify succeeds to only push objects with a defined staleDate.
-                    if (s) {
-                        this.Versions.push({ key, value: s });
-                        ++this.keys;
-                    }
-                }
-            }
+            const v = this.trimMetadataAddStaleDate(value, this.staleDate);
+            lastModified = v.lastModified;
+            const { contentValue } = v;
+            // check that trimMetadataAddStaleDate succeeds to only push objects with a defined staleDate.
+            if (contentValue) {
+                this.Contents.push({ key, value: contentValue });
+                ++this.keys;
+            }
         }

@ -141,59 +167,44 @@ class DelimiterNonCurrent extends DelimiterVersions {
         // the following version.
         this.staleDate = lastModified || this.getLastModified(value);

-        return;
+        return FILTER_ACCEPT;
     }

-    /**
-     * Parses the stringified entry's value and removes the location property if too large.
-     * @param {string} s - stringified value
-     * @return {object} p - undefined if parsing fails, otherwise it contains the parsed value.
-     */
-    _parse(s) {
-        let p;
+    trimMetadataAddStaleDate(value, staleDate) {
+        let ret = undefined;
+        let lastModified = undefined;
         try {
-            p = JSON.parse(s);
-            if (s.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
-                delete p.location;
-            }
+            ret = JSON.parse(value);
+            ret.staleDate = staleDate;
+            lastModified = ret['last-modified'];
+            if (value.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
+                delete ret.location;
+            }
+            ret = JSON.stringify(ret);
         } catch (e) {
-            this.logger.warn('Could not parse Object Metadata while listing', {
-                method: 'DelimiterNonCurrent._parse',
-                err: e.toString(),
-            });
+            // Prefer returning unfiltered data rather than
+            // stopping the service in case of parsing failure.
+            // The risk of this approach is a potential
+            // reproduction of MD-692, where too much memory is
+            // used by repd.
+            this.logger.warn('could not parse Object Metadata while listing',
+                {
+                    method: 'trimMetadataAddStaleDate',
+                    err: e.toString(),
+                });
         }
-        return p;
-    }

-    _stringify(parsedMD, staleDate) {
-        const p = parsedMD;
-        let s = undefined;
-        p.staleDate = staleDate;
-        try {
-            s = JSON.stringify(p);
-        } catch (e) {
-            this.logger.warn('could not stringify Object Metadata while listing', {
-                method: 'DelimiterNonCurrent._stringify',
-                err: e.toString(),
-            });
-        }
-        return s;
+        return { contentValue: ret, lastModified };
     }

     result() {
-        const { Versions, IsTruncated, NextKeyMarker, NextVersionIdMarker } = super.result();
         const result = {
-            Contents: Versions,
-            IsTruncated,
+            Contents: this.Contents,
+            IsTruncated: this.IsTruncated,
         };
-        if (NextKeyMarker) {
-            result.NextKeyMarker = NextKeyMarker;
-        }
-        if (NextVersionIdMarker) {
-            result.NextVersionIdMarker = NextVersionIdMarker;
-        }
+        if (this.IsTruncated) {
+            result.NextKeyMarker = this.NextKeyMarker;
+            result.NextVersionIdMarker = this.NextVersionIdMarker;
+        }
         return result;

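Both sides of this diff rely on the invariant stated in the NOTE above: versions of a key arrive youngest first, so each version's last-modified date becomes the "stale date" of the next (older) version. A toy trace of that chaining, with hypothetical version ids and dates:

const versions = [
    { versionId: 'v3', lastModified: '2023-03-01T00:00:00.000Z' }, // current: skipped
    { versionId: 'v2', lastModified: '2023-02-01T00:00:00.000Z' },
    { versionId: 'v1', lastModified: '2023-01-01T00:00:00.000Z' },
];
let staleDate = null;
for (const v of versions) {
    if (staleDate) {
        // v became non-current when its successor was written
        console.log(v.versionId, 'staleDate:', staleDate);
    }
    staleDate = v.lastModified;
}
// v2 staleDate: 2023-03-01..., v1 staleDate: 2023-02-01...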

@ -1,50 +1,45 @@
-const DelimiterVersions = require('./delimiterVersions').DelimiterVersions;
-const { FILTER_END } = require('./tools');
+'use strict'; // eslint-disable-line strict
+
+const Delimiter = require('./delimiter').Delimiter;
+const VSConst = require('../../versioning/constants').VersioningConstants;
+const { inc, FILTER_ACCEPT, FILTER_END, SKIP_NONE } = require('./tools');
+
+const VID_SEP = VSConst.VersionId.Separator;
+const { DbPrefixes } = VSConst;
+
+const DELIMITER_TIMEOUT_MS = 10 * 1000; // 10s
 const TRIM_METADATA_MIN_BLOB_SIZE = 10000;

 /**
- * Handle object listing with parameters. This extends the base class DelimiterVersions
+ * Handle object listing with parameters. This extends the base class Delimiter
  * to return the orphan delete markers. Orphan delete markers are also
  * referred to as expired object delete markers.
  * They are delete markers with zero noncurrent versions.
  */
-class DelimiterOrphanDeleteMarker extends DelimiterVersions {
+class DelimiterOrphanDeleteMarker extends Delimiter {
     /**
-     * Delimiter listing of orphan delete markers.
+     * Delimiter listing of non-current versions.
      * @param {Object} parameters - listing parameters
      * @param {String} parameters.beforeDate - limit the response to keys older than beforeDate
-     * @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
      * @param {RequestLogger} logger - The logger of the request
      * @param {String} [vFormat] - versioning key format
      */
     constructor(parameters, logger, vFormat) {
-        const {
-            marker,
-            maxKeys,
-            prefix,
-            beforeDate,
-            maxScannedLifecycleListingEntries,
-        } = parameters;
-        const versionParams = {
-            // The orphan delete marker logic uses the term 'marker' instead of 'keyMarker',
-            // as the latter could suggest the presence of a 'versionIdMarker'.
-            keyMarker: marker,
-            maxKeys,
-            prefix,
-        };
-        super(versionParams, logger, vFormat);
-        this.maxScannedLifecycleListingEntries = maxScannedLifecycleListingEntries;
-        this.beforeDate = beforeDate;
-        // this.prevKeyName is used as a marker for the next listing when the current one reaches its entry limit.
-        // We cannot rely on this.keyName, as it contains the name of the current key.
-        // In the event of a listing interruption due to reaching the maximum scanned entries,
-        // relying on this.keyName would cause the next listing to skip the current key because S3 starts
-        // listing after the marker.
-        this.prevKeyName = null;
+        super(parameters, logger, vFormat);
+        this.beforeDate = parameters.beforeDate;
+        this.skipping = this.skippingV1;
+        this.genMDParams = this.genMDParamsV1;
         this.keyName = null;
+        this.staleDate = null;
         this.value = null;
-        this.scannedKeys = 0;
+        // used for monitoring
+        this.evaluatedKeys = 0;
     }

+    skippingV1() {
+        return SKIP_NONE;
+    }

     _reachedMaxKeys() {
@ -54,6 +49,28 @@ class DelimiterOrphanDeleteMarker extends DelimiterVersions {
         return false;
     }

+    genMDParamsV1() {
+        const params = {
+            gte: DbPrefixes.Version,
+            lt: inc(DbPrefixes.Version),
+        };
+        if (this.prefix) {
+            params.gte = `${DbPrefixes.Version}${this.prefix}`;
+            params.lt = `${DbPrefixes.Version}${inc(this.prefix)}`;
+        }
+        if (this.marker && `${DbPrefixes.Version}${this.marker}` >= params.gte) {
+            delete params.gte;
+            params.gt = DbPrefixes.Version
+                + inc(this.marker + VID_SEP);
+        }
+        this.start = Date.now();
+        return params;
+    }

     _addOrphan() {
         const parsedValue = this._parse(this.value);
         // if parsing fails, skip the key.
@ -63,73 +80,43 @@ class DelimiterOrphanDeleteMarker extends DelimiterVersions {
             // We then check if the orphan version is a delete marker and if it is older than the "beforeDate"
             if ((!this.beforeDate || (lastModified && lastModified < this.beforeDate)) && isDeleteMarker) {
                 // Prefer returning untrimmed data rather than stopping the service in case of parsing failure.
-                const s = this._stringify(parsedValue) || this.value;
-                this.Versions.push({ key: this.keyName, value: s });
-                this.nextKeyMarker = this.keyName;
+                const s = this._trimAndStringify(parsedValue) || this.value;
+                this.Contents.push({ key: this.keyName, value: s });
+                this.NextMarker = this.keyName;
                 ++this.keys;
             }
         }
     }

-    /**
-     * Parses the stringified entry's value and removes the location property if too large.
-     * @param {string} s - stringified value
-     * @return {object} p - undefined if parsing fails, otherwise it contains the parsed value.
-     */
     _parse(s) {
         let p;
         try {
             p = JSON.parse(s);
-            if (s.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
-                delete p.location;
-            }
         } catch (e) {
-            this.logger.warn('Could not parse Object Metadata while listing', {
-                method: 'DelimiterOrphanDeleteMarker._parse',
-                err: e.toString(),
-            });
+            this.logger.warn(
+                'Could not parse Object Metadata while listing',
+                { err: e.toString() });
         }
         return p;
     }

-    _stringify(value) {
+    _trimAndStringify(value) {
         const p = value;
         let s = undefined;
         try {
+            if (p.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
+                delete p.location;
+            }
             s = JSON.stringify(p);
         } catch (e) {
-            this.logger.warn('could not stringify Object Metadata while listing',
+            this.logger.warn('could not trim and stringify Object Metadata while listing',
                 {
-                    method: 'DelimiterOrphanDeleteMarker._stringify',
+                    method: 'trimMetadataAddStaleDate',
                     err: e.toString(),
                 });
         }
         return s;
     }

-    /**
-     * The purpose of _isMaxScannedEntriesReached is to restrict the number of scanned entries,
-     * thus controlling resource overhead (CPU...).
-     * @return {boolean} isMaxScannedEntriesReached - true if the maximum limit on the number
-     * of entries scanned has been reached, false otherwise.
-     */
-    _isMaxScannedEntriesReached() {
-        return this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries;
-    }
-
-    filter(obj) {
-        if (this._isMaxScannedEntriesReached()) {
-            this.nextKeyMarker = this.prevKeyName;
-            this.IsTruncated = true;
-            this.logger.info('listing stopped due to reaching the maximum scanned entries limit',
-                {
-                    maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
-                    scannedKeys: this.scannedKeys,
-                });
-            return FILTER_END;
-        }
-        ++this.scannedKeys;
-        return super.filter(obj);
-    }

     /**
      * NOTE: Each version of a specific key is sorted from the latest to the oldest
@ -143,62 +130,73 @@ class DelimiterOrphanDeleteMarker extends DelimiterVersions {
      * - the internal timeout is reached
      * NOTE: we cannot leverage MongoDB to list keys older than "beforeDate"
      * because then we will not be able to assess its orphanage.
-     * @param {String} key - The object key.
-     * @param {String} versionId - The object version id.
+     * @param {String} keyVersionSuffix - The key with version id as a suffix.
      * @param {String} value - The value of the key
-     * @return {undefined}
+     * @return {number} - indicates if iteration should continue
      */
-    addVersion(key, versionId, value) {
+    addContents(keyVersionSuffix, value) {
+        if (this._reachedMaxKeys()) {
+            return FILTER_END;
+        }
+
+        if (this.start && Date.now() - this.start > DELIMITER_TIMEOUT_MS) {
+            this.IsTruncated = true;
+            this.NextMarker = this.keyName;
+            this.logger.info('listing stopped after expected internal timeout',
+                {
+                    timeoutMs: DELIMITER_TIMEOUT_MS,
+                    evaluatedKeys: this.evaluatedKeys,
+                });
+            return FILTER_END;
+        }
+        ++this.evaluatedKeys;
+
+        const versionIdIndex = keyVersionSuffix.indexOf(VID_SEP);
+        // key without version suffix
+        const key = keyVersionSuffix.slice(0, versionIdIndex);
+
         // For a given key, the youngest version is kept in memory since it represents the current version.
         if (key !== this.keyName) {
             // If this.value is defined, it means that <this.keyName, this.value> pair is "allowed" to be an orphan.
             if (this.value) {
                 this._addOrphan();
             }
-            this.prevKeyName = this.keyName;
             this.keyName = key;
             this.value = value;

-            return;
+            return FILTER_ACCEPT;
         }

-        // If the key is not the current version, we can skip it in the next listing
-        // in the case where the current listing is interrupted due to reaching the maximum scanned entries.
-        this.prevKeyName = key;
         this.keyName = key;
         this.value = null;

-        return;
+        return FILTER_ACCEPT;
     }

     result() {
-        // Only check for remaining last orphan delete marker if the listing is not interrupted.
-        // This will help avoid false positives.
-        if (!this._isMaxScannedEntriesReached()) {
-            // The following check makes sure the last orphan delete marker is not forgotten.
-            if (this.keys < this.maxKeys) {
-                if (this.value) {
-                    this._addOrphan();
-                }
-            // The following makes sure that if maxKeys is reached, isTruncated is set to true.
-            // We moved the "isTruncated" from _reachedMaxKeys to make sure we take into account the last entity
-            // if listing is truncated right before the last entity and the last entity is an orphan delete marker.
-            } else {
-                this.IsTruncated = this.maxKeys > 0;
-            }
-        }
+        // The following check makes sure the last orphan delete marker is not forgotten.
+        if (this.keys < this.maxKeys) {
+            if (this.value) {
+                this._addOrphan();
+            }
+        // The following makes sure that if maxKeys is reached, isTruncated is set to true.
+        // We moved the "isTruncated" from _reachedMaxKeys to make sure we take into account the last entity
+        // if listing is truncated right before the last entity and the last entity is an orphan delete marker.
+        } else {
+            this.IsTruncated = this.maxKeys > 0;
+        }

         const result = {
-            Contents: this.Versions,
+            Contents: this.Contents,
             IsTruncated: this.IsTruncated,
         };

         if (this.IsTruncated) {
-            result.NextMarker = this.nextKeyMarker;
+            result.NextMarker = this.NextMarker;
         }

         return result;
     }
 }
 module.exports = { DelimiterOrphanDeleteMarker };

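The class name captures the rule: a delete marker is an "orphan" when it is the only version left for its key. A stand-alone sketch of that decision, independent of the streaming state machine above (toy entries, sorted by key with versions youngest first):

function findOrphanDeleteMarkers(entries) {
    // entries: [{ key, isDeleteMarker }]
    const orphans = [];
    for (let i = 0; i < entries.length; i++) {
        const onlyVersionOfKey =
            (i === 0 || entries[i - 1].key !== entries[i].key) &&
            (i === entries.length - 1 || entries[i + 1].key !== entries[i].key);
        if (onlyVersionOfKey && entries[i].isDeleteMarker) {
            orphans.push(entries[i].key);
        }
    }
    return orphans;
}

console.log(findOrphanDeleteMarkers([
    { key: 'a', isDeleteMarker: true },  // only version: orphan
    { key: 'b', isDeleteMarker: true },  // has a noncurrent version: not orphan
    { key: 'b', isDeleteMarker: false },
]));
// => ['a']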

@ -1,12 +1,6 @@
 'use strict'; // eslint-disable-line strict

-const Extension = require('./Extension').default;
+const Delimiter = require('./delimiter').Delimiter;

-import {
-    FilterState,
-    FilterReturnValue,
-} from './delimiter';
 const Version = require('../../versioning/Version').Version;
 const VSConst = require('../../versioning/constants').VersioningConstants;
 const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } =
@ -15,10 +9,24 @@ const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } =
 const VID_SEP = VSConst.VersionId.Separator;
 const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

+// TODO: when S3C-4682 code is back, cleanup fields, methods and types
+// already present in Delimiter class
+
+export interface FilterState {
+    id: number,
+};
+
+export interface FilterReturnValue {
+    FILTER_ACCEPT,
+    FILTER_SKIP,
+    FILTER_END,
+};

 export const enum DelimiterVersionsFilterStateId {
     NotSkipping = 1,
     SkippingPrefix = 2,
-    SkippingVersions = 3,
+    WaitForNullKey = 3,
+    SkippingVersions = 4,
 };

 export interface DelimiterVersionsFilterState_NotSkipping extends FilterState {
@ -30,12 +38,16 @@ export interface DelimiterVersionsFilterState_SkippingPrefix extends FilterState
     prefix: string;
 };

+export interface DelimiterVersionsFilterState_WaitForNullKey extends FilterState {
+    id: DelimiterVersionsFilterStateId.WaitForNullKey,
+};

 export interface DelimiterVersionsFilterState_SkippingVersions extends FilterState {
     id: DelimiterVersionsFilterStateId.SkippingVersions,
     gt: string;
 };

-type KeyHandler = (key: string, versionId: string | undefined, value: string) => FilterReturnValue;
+type KeyHandler = (key: string, value: string) => FilterReturnValue;

 type ResultObject = {
     CommonPrefixes: string[],
@ -68,17 +80,13 @@ type GenMDParamsItem = {
  * @prop {String|undefined} prefix - prefix per amazon format
  * @prop {Number} maxKeys - number of keys to list
  */
-export class DelimiterVersions extends Extension {
+export class DelimiterVersions extends Delimiter {

     state: FilterState;
     keyHandlers: { [id: number]: KeyHandler };

     constructor(parameters, logger, vFormat) {
-        super(parameters, logger);
-        // original listing parameters
-        this.delimiter = parameters.delimiter;
-        this.prefix = parameters.prefix;
-        this.maxKeys = parameters.maxKeys || 1000;
+        super(parameters, logger, vFormat);
         // specific to version listing
         this.keyMarker = parameters.keyMarker;
         this.versionIdMarker = parameters.versionIdMarker;
@ -86,11 +94,7 @@ export class DelimiterVersions extends Extension {
         this.masterKey = undefined;
         this.masterVersionId = undefined;
         this.nullKey = null;
-        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
         // listing results
-        this.CommonPrefixes = [];
-        this.Versions = [];
-        this.IsTruncated = false;
         this.nextKeyMarker = parameters.keyMarker;
         this.nextVersionIdMarker = undefined;
@ -122,14 +126,17 @@ export class DelimiterVersions extends Extension {
             DelimiterVersionsFilterStateId.SkippingPrefix,
             this.keyHandler_SkippingPrefix.bind(this));

+        this.setKeyHandler(
+            DelimiterVersionsFilterStateId.WaitForNullKey,
+            this.keyHandler_WaitForNullKey.bind(this));

         this.setKeyHandler(
             DelimiterVersionsFilterStateId.SkippingVersions,
             this.keyHandler_SkippingVersions.bind(this));

         if (this.versionIdMarker) {
-            this.state = <DelimiterVersionsFilterState_SkippingVersions> {
-                id: DelimiterVersionsFilterStateId.SkippingVersions,
-                gt: `${this.keyMarker}${VID_SEP}${this.versionIdMarker}`,
+            this.state = <DelimiterVersionsFilterState_WaitForNullKey> {
+                id: DelimiterVersionsFilterStateId.WaitForNullKey,
             };
         } else {
             this.state = <DelimiterVersionsFilterState_NotSkipping> {
@ -194,20 +201,6 @@ export class DelimiterVersions extends Extension {
         return [mParams, vParams];
     }

-    /**
-     * check if the max keys count has been reached and set the
-     * final state of the result if it is the case
-     * @return {Boolean} - indicates if the iteration has to stop
-     */
-    _reachedMaxKeys(): boolean {
-        if (this.keys >= this.maxKeys) {
-            // In cases of maxKeys <= 0 -> IsTruncated = false
-            this.IsTruncated = this.maxKeys > 0;
-            return true;
-        }
-        return false;
-    }

     /**
      * Used to synchronize listing of M and V prefixes by object key
      *
@ -243,30 +236,6 @@ export class DelimiterVersions extends Extension {
         return { key: nonversionedKey, versionId };
     }

-    /**
-     * Include a key in the listing output, in the Versions or CommonPrefix result
-     *
-     * @param {string} key - key (without version ID)
-     * @param {string} versionId - version ID
-     * @param {string} value - metadata value
-     * @return {undefined}
-     */
-    addKey(key: string, versionId: string, value: string) {
-        // add the subprefix to the common prefixes if the key has the delimiter
-        const commonPrefix = this.getCommonPrefix(key);
-        if (commonPrefix) {
-            this.addCommonPrefix(commonPrefix);
-            // transition into SkippingPrefix state to skip all following keys
-            // while they start with the same prefix
-            this.setState(<DelimiterVersionsFilterState_SkippingPrefix> {
-                id: DelimiterVersionsFilterStateId.SkippingPrefix,
-                prefix: commonPrefix,
-            });
-        } else {
-            this.addVersion(key, versionId, value);
-        }
-    }

     /**
      * Add a (key, versionId, value) tuple to the listing.
      * Set the NextMarker to the current key
@ -276,8 +245,8 @@ export class DelimiterVersions extends Extension {
      * @param {String} value - The value of the key
      * @return {undefined}
      */
-    addVersion(key: string, versionId: string, value: string) {
-        this.Versions.push({
+    addContents(key: string, versionId: string, value: string) {
+        this.Contents.push({
             key,
             versionId,
             value: this.trimMetadata(value),
@ -288,9 +257,6 @@ export class DelimiterVersions extends Extension {
     }

     getCommonPrefix(key: string): string | undefined {
-        if (!this.delimiter) {
-            return undefined;
-        }
         const baseIndex = this.prefix ? this.prefix.length : 0;
         const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
         if (delimiterIndex === -1) {
@ -309,7 +275,6 @@ export class DelimiterVersions extends Extension {
         this.CommonPrefixes.push(commonPrefix);
         ++this.keys;
         this.nextKeyMarker = commonPrefix;
-        this.nextVersionIdMarker = undefined;
     }

     /**
@ -325,6 +290,21 @@ export class DelimiterVersions extends Extension {
         this.nullKey = { key, versionId, value };
     }

+    /**
+     * Add the cached null key to the results. This is called when
+     * reaching the correct position for the null key in the output.
+     *
+     * @return {undefined}
+     */
+    addCurrentNullKey(): void {
+        this.addContents(
+            this.nullKey.key,
+            this.nullKey.versionId,
+            this.nullKey.value,
+        );
+        this.nullKey = null;
+    }

     getObjectKeyV0(obj: { key: string }): string {
         return obj.key;
     }
@ -348,24 +328,7 @@ export class DelimiterVersions extends Extension {
         const key = this.getObjectKey(obj);
         const value = obj.value;

-        const { key: nonversionedKey, versionId: keyVersionId } = this.parseKey(key);
-        if (this.nullKey) {
-            if (this.nullKey.key !== nonversionedKey
-                || this.nullKey.versionId < <string> keyVersionId) {
-                this.handleKey(
-                    this.nullKey.key, this.nullKey.versionId, this.nullKey.value);
-                this.nullKey = null;
-            }
-        }
-        if (keyVersionId === '') {
-            // null key
-            this.cacheNullKey(nonversionedKey, Version.from(value).getVersionId(), value);
-            if (this.state.id === DelimiterVersionsFilterStateId.SkippingVersions) {
-                return FILTER_SKIP;
-            }
-            return FILTER_ACCEPT;
-        }
-        return this.handleKey(nonversionedKey, keyVersionId, value);
+        return this.handleKey(key, value);
     }

     setState(state: FilterState): void {
@ -376,11 +339,11 @@ export class DelimiterVersions extends Extension {
         this.keyHandlers[stateId] = keyHandler;
     }

-    handleKey(key: string, versionId: string | undefined, value: string): FilterReturnValue {
-        return this.keyHandlers[this.state.id](key, versionId, value);
+    handleKey(key: string, value: string): FilterReturnValue {
+        return this.keyHandlers[this.state.id](key, value);
     }

-    keyHandler_NotSkippingV0(key: string, versionId: string | undefined, value: string): FilterReturnValue {
+    keyHandler_NotSkippingV0(key: string, value: string): FilterReturnValue {
         if (key.startsWith(DbPrefixes.Replay)) {
             // skip internal replay prefix entirely
             this.setState(<DelimiterVersionsFilterState_SkippingPrefix> {
@ -392,37 +355,68 @@ export class DelimiterVersions extends Extension {
         if (Version.isPHD(value)) {
             return FILTER_ACCEPT;
         }
-        return this.filter_onNewKey(key, versionId, value);
+        return this.filter_onNewKey(key, value);
     }

-    keyHandler_NotSkippingV1(key: string, versionId: string | undefined, value: string): FilterReturnValue {
+    keyHandler_NotSkippingV1(key: string, value: string): FilterReturnValue {
         // NOTE: this check on PHD is only useful for Artesca, S3C
         // does not use PHDs in V1 format
         if (Version.isPHD(value)) {
             return FILTER_ACCEPT;
         }
-        return this.filter_onNewKey(key, versionId, value);
+        return this.filter_onNewKey(key, value);
     }

-    filter_onNewKey(key: string, versionId: string | undefined, value: string): FilterReturnValue {
+    filter_onNewKey(key: string, value: string): FilterReturnValue {
         if (this._reachedMaxKeys()) {
             return FILTER_END;
         }
-        if (versionId === undefined) {
+        const { key: nonversionedKey, versionId: keyVersionId } = this.parseKey(key);
+        if (this.nullKey &&
+            (this.nullKey.key !== nonversionedKey
+             || this.nullKey.versionId < <string> keyVersionId)) {
+            this.addCurrentNullKey();
+            if (this._reachedMaxKeys()) {
+                // IsTruncated: true is set, which is wanted because
+                // there is at least one more key to output: the one
+                // being processed here
+                return FILTER_END;
+            }
+        }
+        let versionId: string;
+        if (keyVersionId === undefined) {
             this.masterKey = key;
             this.masterVersionId = Version.from(value).getVersionId() || 'null';
-            this.addKey(this.masterKey, this.masterVersionId, value);
+            versionId = this.masterVersionId;
         } else {
-            if (this.masterKey === key && this.masterVersionId === versionId) {
+            if (keyVersionId === '') {
+                // null key
+                this.cacheNullKey(nonversionedKey, Version.from(value).getVersionId(), value);
+                return FILTER_ACCEPT;
+            }
+            if (this.masterKey === nonversionedKey && this.masterVersionId === keyVersionId) {
                 // do not add a version key if it is the master version
                 return FILTER_ACCEPT;
             }
-            this.addKey(key, versionId, value);
+            versionId = keyVersionId;
         }
+        // add the subprefix to the common prefixes if the key has the delimiter
+        const commonPrefix = this.getCommonPrefix(nonversionedKey);
+        if (commonPrefix) {
+            this.addCommonPrefix(commonPrefix);
+            // transition into SkippingPrefix state to skip all following keys
+            // while they start with the same prefix
+            this.setState(<DelimiterVersionsFilterState_SkippingPrefix> {
+                id: DelimiterVersionsFilterStateId.SkippingPrefix,
+                prefix: commonPrefix,
+            });
+            return FILTER_ACCEPT;
+        }
+        this.addContents(nonversionedKey, versionId, value);
         return FILTER_ACCEPT;
     }

-    keyHandler_SkippingPrefix(key: string, versionId: string | undefined, value: string): FilterReturnValue {
+    keyHandler_SkippingPrefix(key: string, value: string): FilterReturnValue {
         const { prefix } = <DelimiterVersionsFilterState_SkippingPrefix> this.state;
         if (key.startsWith(prefix)) {
             return FILTER_SKIP;
@ -430,11 +424,39 @@ export class DelimiterVersions extends Extension {
         this.setState(<DelimiterVersionsFilterState_NotSkipping> {
             id: DelimiterVersionsFilterStateId.NotSkipping,
         });
-        return this.handleKey(key, versionId, value);
+        return this.handleKey(key, value);
     }

-    keyHandler_SkippingVersions(key: string, versionId: string | undefined, value: string): FilterReturnValue {
-        if (key === this.keyMarker) {
+    keyHandler_WaitForNullKey(key: string, value: string): FilterReturnValue {
+        const { key: nonversionedKey, versionId } = this.parseKey(key);
+        if (nonversionedKey !== this.keyMarker) {
+            this.setState(<DelimiterVersionsFilterState_NotSkipping> {
+                id: DelimiterVersionsFilterStateId.NotSkipping,
+            });
+            return this.handleKey(key, value);
+        }
+        // we may now skip versions until VersionIdMarker
+        this.setState(<DelimiterVersionsFilterState_SkippingVersions> {
+            id: DelimiterVersionsFilterStateId.SkippingVersions,
+            gt: `${this.keyMarker}${VID_SEP}${this.versionIdMarker}`,
+        });
+        if (versionId === '') {
+            // only cache the null key if its version is older than
+            // the current version ID marker, otherwise it has already
+            // been output in a previous listing output
+            const nullVersionId = Version.from(value).getVersionId();
+            if (nullVersionId > this.versionIdMarker) {
+                this.cacheNullKey(nonversionedKey, nullVersionId, value);
+            }
+            return FILTER_SKIP;
+        }
+        return this.handleKey(key, value);
+    }
+
+    keyHandler_SkippingVersions(key: string, value: string): FilterReturnValue {
+        const { key: nonversionedKey, versionId } = this.parseKey(key);
+        if (nonversionedKey === this.keyMarker) {
             // since the nonversioned key equals the marker, there is
             // necessarily a versionId in this key
             const _versionId = <string> versionId;
@ -450,21 +472,18 @@ export class DelimiterVersions extends Extension {
         this.setState(<DelimiterVersionsFilterState_NotSkipping> {
             id: DelimiterVersionsFilterStateId.NotSkipping,
         });
-        return this.handleKey(key, versionId, value);
+        return this.handleKey(key, value);
     }

-    skippingBase(): string | undefined {
+    skippingBase() {
         switch (this.state.id) {
         case DelimiterVersionsFilterStateId.SkippingPrefix:
             const { prefix } = <DelimiterVersionsFilterState_SkippingPrefix> this.state;
-            return inc(prefix);
+            return prefix;

         case DelimiterVersionsFilterStateId.SkippingVersions:
             const { gt } = <DelimiterVersionsFilterState_SkippingVersions> this.state;
-            // the contract of skipping() is to return the first key
-            // that can be skipped to, so adding a null byte to skip
-            // over the existing versioned key set in 'gt'
-            return `${gt}\0`;
+            return gt;

         default:
             return SKIP_NONE;
@ -511,12 +530,12 @@ export class DelimiterVersions extends Extension {
         // does not fit, so we know the result is now truncated
         // because there remains the null key to be output.
         //
-        if (this.nullKey) {
-            this.handleKey(this.nullKey.key, this.nullKey.versionId, this.nullKey.value);
+        if (this.nullKey && !this._reachedMaxKeys()) {
+            this.addCurrentNullKey();
         }
         const result: ResultObject = {
             CommonPrefixes: this.CommonPrefixes,
-            Versions: this.Versions,
+            Versions: this.Contents,
             IsTruncated: this.IsTruncated,
         };
         if (this.delimiter) {

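The two sides of skippingBase() above differ in who increments the skip target: one returns the first listable key directly (inc(prefix), or the 'gt' key plus a NUL byte), the other returns the raw prefix or range and relies on the caller to increment it (see the _inc() helper added to the Skip class in the next file). A small sketch of both forms, with an inc() assumed to bump the last character:

const inc = str => str.slice(0, -1) + String.fromCharCode(str.charCodeAt(str.length - 1) + 1);

// SkippingPrefix: first key past every key starting with 'photos/'
console.log(inc('photos/'));            // 'photos0'

// SkippingVersions: first key strictly greater than the versioned key in 'gt'
const gt = 'doc.txt\x00v123';           // hypothetical <key><VID_SEP><versionId>
console.log(JSON.stringify(`${gt}\0`)); // same string with a NUL byte appended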

@ -52,21 +52,21 @@ class Skip {
         assert(this.skipRangeCb);

         const filteringResult = this.extension.filter(entry);
-        const skipTo = this.extension.skipping();
+        const skippingRange = this.extension.skipping();

         if (filteringResult === FILTER_END) {
             this.listingEndCb();
         } else if (filteringResult === FILTER_SKIP
-            && skipTo !== SKIP_NONE) {
+            && skippingRange !== SKIP_NONE) {
             if (++this.streakLength >= MAX_STREAK_LENGTH) {
                 let newRange;
-                if (Array.isArray(skipTo)) {
+                if (Array.isArray(skippingRange)) {
                     newRange = [];
-                    for (let i = 0; i < skipTo.length; ++i) {
-                        newRange.push(skipTo[i]);
+                    for (let i = 0; i < skippingRange.length; ++i) {
+                        newRange.push(this._inc(skippingRange[i]));
                     }
                 } else {
-                    newRange = skipTo;
+                    newRange = this._inc(skippingRange);
                 }
                 /* Avoid looping on the same range again and again. */
                 if (newRange === this.gteParams) {
@ -79,6 +79,16 @@ class Skip {
             this.streakLength = 0;
         }
     }

+    _inc(str) {
+        if (!str) {
+            return str;
+        }
+        const lastCharValue = str.charCodeAt(str.length - 1);
+        const lastCharNewValue = String.fromCharCode(lastCharValue + 1);
+        return `${str.slice(0, str.length - 1)}${lastCharNewValue}`;
+    }
 }

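What the added _inc() helper computes, shown stand-alone: the input string with its last character bumped by one code point, which for a prefix-style range is the smallest string sorting after every key in that range:

const _inc = str => (str
    ? `${str.slice(0, str.length - 1)}${String.fromCharCode(str.charCodeAt(str.length - 1) + 1)}`
    : str);

console.log(_inc('photos/')); // 'photos0' -- '/' bumped to '0'
console.log(_inc('abc'));     // 'abd'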

@ -14,7 +14,7 @@ function vaultSignatureCb(
     err: Error | null,
     authInfo: { message: { body: any } },
     log: Logger,
-    callback: (err: Error | null, data?: any, results?: any, params?: any, infos?: any) => void,
+    callback: (err: Error | null, data?: any, results?: any, params?: any) => void,
     streamingV4Params?: any
 ) {
     // vaultclient API guarantees that it returns:
@ -38,9 +38,7 @@ function vaultSignatureCb(
     }
     // @ts-ignore
     log.addDefaultFields(auditLog);
-    return callback(null, userInfo, authorizationResults, streamingV4Params, {
-        accountQuota: info.accountQuota || {},
-    });
+    return callback(null, userInfo, authorizationResults, streamingV4Params);
 }

 export type AuthV4RequestParams = {
@ -386,19 +384,4 @@ export default class Vault {
             return callback(null, respBody);
         });
     }

-    report(log: Logger, callback: (err: Error | null, data?: any) => void) {
-        // call the report function of the client
-        if (!this.client.report) {
-            return callback(null, {});
-        }
-        // @ts-ignore
-        return this.client.report(log.getSerializedUids(), (err: Error | null, obj?: any) => {
-            if (err) {
-                log.debug(`error from ${this.implName}`, { error: err });
-                return callback(err);
-            }
-            return callback(null, obj);
-        });
-    }
 }


@ -163,20 +163,6 @@ function doAuth(
         return cb(errors.InternalError);
 }

-/**
- * This function will generate a version 4 content-md5 header
- * It looks at the request path to determine what kind of header encoding is required
- *
- * @param path - the request path
- * @param payload - the request payload to hash
- */
-function generateContentMD5Header(
-    path: string,
-    payload: string,
-) {
-    const encoding = path && path.startsWith('/_/backbeat/') ? 'hex' : 'base64';
-    return crypto.createHash('md5').update(payload, 'binary').digest(encoding);
-}

 /**
  * This function will generate a version 4 header
  *
@ -189,7 +175,6 @@ function generateContentMD5Header(
  * @param [proxyPath] - path that gets proxied by reverse proxy
  * @param [sessionToken] - security token if the access/secret keys
  * are temporary credentials from STS
- * @param [payload] - body of the request if any
  */
 function generateV4Headers(
     request: any,
@ -197,9 +182,8 @@ function generateV4Headers(
     accessKey: string,
     secretKeyValue: string,
     awsService: string,
-    proxyPath?: string,
-    sessionToken?: string,
-    payload?: string,
+    proxyPath: string,
+    sessionToken: string
 ) {
     Object.assign(request, { headers: {} });
     const amzDate = convertUTCtoISO8601(Date.now());
@ -212,7 +196,7 @@ function generateV4Headers(
     const timestamp = amzDate;
     const algorithm = 'AWS4-HMAC-SHA256';

-    payload = payload || '';
+    let payload = '';
     if (request.method === 'POST') {
         payload = queryString.stringify(data, undefined, undefined, {
             encodeURIComponent,
@ -223,7 +207,6 @@ function generateV4Headers(
     request.setHeader('host', request._headers.host);
     request.setHeader('x-amz-date', amzDate);
     request.setHeader('x-amz-content-sha256', payloadChecksum);
-    request.setHeader('content-md5', generateContentMD5Header(request.path, payload));

     if (sessionToken) {
         request.setHeader('x-amz-security-token', sessionToken);
@ -234,7 +217,6 @@ function generateV4Headers(
         .filter(headerName =>
             headerName.startsWith('x-amz-')
             || headerName.startsWith('x-scal-')
-            || headerName === 'content-md5'
             || headerName === 'host',
         ).sort().join(';');
     const params = { request, signedHeaders, payloadChecksum,

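The removed generateContentMD5Header() helper is small enough to run in isolation; it derives the digest encoding from the request path:

const crypto = require('crypto');

function generateContentMD5Header(path, payload) {
    const encoding = path && path.startsWith('/_/backbeat/') ? 'hex' : 'base64';
    return crypto.createHash('md5').update(payload, 'binary').digest(encoding);
}

console.log(generateContentMD5Header('/_/backbeat/api', '{}')); // hex digest
console.log(generateContentMD5Header('/bucket/key', '{}'));     // base64 digest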

@ -134,7 +134,7 @@ export default class ChainBackend extends BaseBackend {
     }

     const check = (policy) => {
-        const key = (policy.arn || '') + (policy.versionId || '') + (policy.action || '');
+        const key = (policy.arn || '') + (policy.versionId || '');
         if (!policyMap[key] || !policyMap[key].isAllowed) {
             policyMap[key] = policy;
         }
@ -158,12 +158,6 @@ export default class ChainBackend extends BaseBackend {
             if (policyMap[key].versionId) {
                 policyRes.versionId = policyMap[key].versionId;
             }
-            if (policyMap[key].isImplicit !== undefined) {
-                policyRes.isImplicit = policyMap[key].isImplicit;
-            }
-            if (policyMap[key].action) {
-                policyRes.action = policyMap[key].action;
-            }
             return policyRes;
         });
     }
@ -212,22 +206,4 @@ export default class ChainBackend extends BaseBackend {
             return callback(null, res);
         });
     }

-    report(reqUid: string, callback: any) {
-        this._forEachClient((client, done) =>
-            client.report(reqUid, done),
-        (err, res) => {
-            if (err) {
-                return callback(err);
-            }
-            const mergedRes = res.reduce((acc, val) => {
-                Object.keys(val).forEach(k => {
-                    acc[k] = val[k];
-                });
-                return acc;
-            }, {});
-            return callback(null, mergedRes);
-        });
-    }
 }

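The removed ChainBackend.report() folds the per-client report objects into one flat object, later clients overwriting earlier ones on key conflicts. The merge step extracted into a stand-alone function:

function mergeReports(reports) {
    return reports.reduce((acc, val) => {
        Object.keys(val).forEach(k => {
            acc[k] = val[k];
        });
        return acc;
    }, {});
}

console.log(mergeReports([{ users: 2 }, { buckets: 5 }, { users: 3 }]));
// => { users: 3, buckets: 5 }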

@ -161,10 +161,6 @@ class InMemoryBackend extends BaseBackend {
         };
         return cb(null, vaultReturnObject);
     }

-    report(log: Logger, callback: any) {
-        return callback(null, {});
-    }
 }


@ -1,569 +0,0 @@
import cluster, { Worker } from 'cluster';
import * as werelogs from 'werelogs';
import { default as errors } from '../../lib/errors';
const rpcLogger = new werelogs.Logger('ClusterRPC');
/**
* Remote procedure calls support between cluster workers.
*
* When using the cluster module, new processes are forked and are
* dispatched workloads, usually HTTP requests. The ClusterRPC module
* implements a RPC system to send commands to all cluster worker
* processes at once from any particular worker, and retrieve their
* individual command results, like a distributed map operation.
*
* The existing nodejs cluster IPC channel is set up from the primary
* to each worker, but not between workers, so there has to be a hop
* by the primary.
*
* How a command is treated:
*
* - a worker sends a command message to the primary
*
* - the primary then forwards that command to each existing worker
* (including the requestor)
*
* - each worker then executes the command and returns a result or an
* error
*
* - the primary gathers all workers results into an array
*
* - finally, the primary dispatches the results array to the original
* requesting worker
*
*
* Limitations:
*
* - The command payload must be serializable, which means that:
* - it should not contain circular references
* - it should be of a reasonable size to be sent in a single RPC message
*
* - The "toWorkers" parameter of value "*" targets the set of workers
* that are available at the time the command is dispatched. Any new
* worker spawned after the command has been dispatched for
 * processing, but before the command completes, does not execute
 * the command and hence is not part of the results array.
*
*
* To set it up:
*
* - On the primary:
* if (cluster.isPrimary) {
* setupRPCPrimary();
* }
*
* - On the workers:
* if (!cluster.isPrimary) {
* setupRPCWorker({
* handler1: (payload: object, uids: string, callback: HandlerCallback) => void,
* handler2: ...
* });
* }
* Handler functions will be passed the command payload, request
* serialized uids, and must call the callback when the worker is done
* processing the command:
* callback(error: Error | null | undefined, result?: any)
*
* When this setup is done, any worker can start sending commands by calling
* the async function sendWorkerCommand().
*/
// exported types
export type ResultObject = {
error: Error | null;
result: any;
};
/**
* saved Promise for sendWorkerCommand
*/
export type CommandPromise = {
resolve: (results?: ResultObject[]) => void;
reject: (error: Error) => void;
timeout: NodeJS.Timeout | null;
};
export type HandlerCallback = (error: (Error & { code?: number }) | null | undefined, result?: any) => void;
export type HandlerFunction = (payload: object, uids: string, callback: HandlerCallback) => void;
export type HandlersMap = {
[index: string]: HandlerFunction;
};
export type PrimaryHandlerFunction = (worker: Worker, payload: object, uids: string, callback: HandlerCallback) => void;
export type PrimaryHandlersMap = Record<string, PrimaryHandlerFunction>;
// private types
type RPCMessage<T extends string, P> = {
type: T;
uids: string;
payload: P;
};
type RPCCommandMessage = RPCMessage<'cluster-rpc:command', any> & {
toWorkers: string;
toHandler: string;
};
type MarshalledResultObject = {
error: string | null;
errorCode?: number;
result: any;
};
type RPCCommandResultMessage = RPCMessage<'cluster-rpc:commandResult', MarshalledResultObject>;
type RPCCommandResultsMessage = RPCMessage<'cluster-rpc:commandResults', {
results: MarshalledResultObject[];
}>;
type RPCCommandErrorMessage = RPCMessage<'cluster-rpc:commandError', {
error: string;
}>;
interface RPCSetupOptions {
/**
* As werelogs is not a peerDependency, arsenal and a parent project
* might have their own separate versions duplicated in dependencies.
 * The configuration is therefore not shared.
* Use this to propagate werelogs config to arsenal's ClusterRPC.
*/
werelogsConfig?: Parameters<typeof werelogs.configure>[0];
};
/**
* In primary: store worker IDs that are waiting to be dispatched
* their command's results, as a mapping.
*/
const uidsToWorkerId: {
[index: string]: number;
} = {};
/**
* In primary: store worker responses for commands in progress as a
* mapping.
*
* Result objects are 'null' while the worker is still processing the
* command. When a worker finishes processing it stores the result as:
* {
* error: string | null,
* result: any
* }
*/
const uidsToCommandResults: {
[index: string]: {
[index: number]: MarshalledResultObject | null;
};
} = {};
/**
* In workers: store promise callbacks for commands waiting to be
* dispatched, as a mapping.
*/
const uidsToCommandPromise: {
[index: string]: CommandPromise;
} = {};
function _isRpcMessage(message) {
return (message !== null &&
typeof message === 'object' &&
typeof message.type === 'string' &&
message.type.startsWith('cluster-rpc:'));
}
/**
* Setup cluster RPC system on the primary
*
* @param {object} [handlers] - mapping of handler names to handler functions
* handler function:
* `handler({Worker} worker, {object} payload, {string} uids, {function} callback)`
* handler callback must be called when worker is done with the command:
* `callback({Error|null} error, {any} [result])`
* @return {undefined}
*/
export function setupRPCPrimary(handlers?: PrimaryHandlersMap, options?: RPCSetupOptions) {
if (options?.werelogsConfig) {
werelogs.configure(options.werelogsConfig);
}
cluster.on('message', (worker, message) => {
if (_isRpcMessage(message)) {
_handlePrimaryMessage(worker, message, handlers);
}
});
}
/**
* Setup RPCs on a cluster worker process
*
* @param {object} handlers - mapping of handler names to handler functions
* handler function:
* handler({object} payload, {string} uids, {function} callback)
* handler callback must be called when worker is done with the command:
* callback({Error|null} error, {any} [result])
* @return {undefined}
*/
export function setupRPCWorker(handlers: HandlersMap, options?: RPCSetupOptions) {
if (!process.send) {
throw new Error('fatal: cannot setup cluster RPC: "process.send" is not available');
}
if (options?.werelogsConfig) {
werelogs.configure(options.werelogsConfig);
}
process.on('message', (message: RPCCommandMessage | RPCCommandResultsMessage) => {
if (_isRpcMessage(message)) {
_handleWorkerMessage(message, handlers);
}
});
}
/**
* Send a command for workers to execute in parallel, and wait for results
*
* @param {string} toWorkers - which workers should execute the command
* Currently the supported values are:
* - "*", meaning all workers will execute the command
* - "PRIMARY", meaning primary process will execute the command
* @param {string} toHandler - name of handler that will execute the
* command in workers, as declared in setupRPCWorker() parameter object
* @param {string} uids - unique identifier of the command, must be
* unique across all commands in progress
* @param {object} payload - message payload, sent as-is to the handler
* @param {number} [timeoutMs=60000] - timeout the command with a
* "RequestTimeout" error after this number of milliseconds - set to 0
* to disable timeouts (the command may then hang forever)
* @returns {Promise}
*/
export async function sendWorkerCommand(
toWorkers: string,
toHandler: string,
uids: string,
payload: object,
timeoutMs: number = 60000
) {
if (typeof uids !== 'string') {
rpcLogger.error('missing or invalid "uids" field', { uids });
throw errors.MissingParameter;
}
if (uidsToCommandPromise[uids] !== undefined) {
rpcLogger.error('a command is already in progress with same uids', { uids });
throw errors.OperationAborted;
}
rpcLogger.info('sending command', { toWorkers, toHandler, uids, payload });
return new Promise((resolve, reject) => {
let timeout: NodeJS.Timeout | null = null;
if (timeoutMs) {
timeout = setTimeout(() => {
delete uidsToCommandPromise[uids];
reject(errors.RequestTimeout);
}, timeoutMs);
}
uidsToCommandPromise[uids] = { resolve, reject, timeout };
const message: RPCCommandMessage = {
type: 'cluster-rpc:command',
toWorkers,
toHandler,
uids,
payload,
};
return process.send?.(message);
});
}
/**
* Get the number of commands in flight
* @returns {number}
*/
export function getPendingCommandsCount() {
return Object.keys(uidsToCommandPromise).length;
}
function _dispatchCommandResultsToWorker(
worker: Worker,
uids: string,
resultsArray: MarshalledResultObject[]
): void {
const message: RPCCommandResultsMessage = {
type: 'cluster-rpc:commandResults',
uids,
payload: {
results: resultsArray,
},
};
worker.send(message);
}
function _dispatchCommandErrorToWorker(
worker: Worker,
uids: string,
error: Error,
): void {
const message: RPCCommandErrorMessage = {
type: 'cluster-rpc:commandError',
uids,
payload: {
error: error.message,
},
};
worker.send(message);
}
function _sendPrimaryCommandResult(
worker: Worker,
uids: string,
error: (Error & { code?: number }) | null | undefined,
result?: any
): void {
const message: RPCCommandResultsMessage = {
type: 'cluster-rpc:commandResults',
uids,
payload: {
results: [{ error: error?.message || null, errorCode: error?.code, result }],
},
};
worker.send?.(message);
}
function _handlePrimaryCommandMessage(
fromWorker: Worker,
logger: any,
message: RPCCommandMessage,
handlers?: PrimaryHandlersMap
): void {
const { toWorkers, toHandler, uids, payload } = message;
if (toWorkers === '*') {
if (uidsToWorkerId[uids] !== undefined) {
logger.warn('new command already has a waiting worker with same uids', {
uids, workerId: uidsToWorkerId[uids],
});
return undefined;
}
const commandResults = {};
for (const workerId of Object.keys(cluster.workers || {})) {
commandResults[workerId] = null;
}
uidsToWorkerId[uids] = fromWorker?.id;
uidsToCommandResults[uids] = commandResults;
for (const [workerId, worker] of Object.entries(cluster.workers || {})) {
logger.debug('sending command message to worker', {
workerId, toHandler, payload,
});
if (worker) {
worker.send(message);
}
}
} else if (toWorkers === 'PRIMARY') {
const { toHandler, uids, payload } = message;
const cb: HandlerCallback = (err, result) => _sendPrimaryCommandResult(fromWorker, uids, err, result);
if (toHandler in (handlers || {})) {
return handlers![toHandler](fromWorker, payload, uids, cb);
}
logger.error('no such handler in "toHandler" field from worker command message', {
toHandler,
});
return cb(errors.NotImplemented);
} else {
logger.error('unsupported "toWorkers" field from worker command message', {
toWorkers,
});
if (fromWorker) {
_dispatchCommandErrorToWorker(fromWorker, uids, errors.NotImplemented);
}
}
}
function _handlePrimaryCommandResultMessage(
fromWorkerId: number,
logger: any,
message: RPCCommandResultMessage
): void {
const { uids, payload } = message;
const commandResults = uidsToCommandResults[uids];
if (!commandResults) {
logger.warn('received command response message from worker for command not in flight', {
workerId: fromWorkerId,
uids,
});
return undefined;
}
if (commandResults[fromWorkerId] === undefined) {
logger.warn('received command response message with unexpected worker ID', {
workerId: fromWorkerId,
uids,
});
return undefined;
}
if (commandResults[fromWorkerId] !== null) {
logger.warn('ignoring duplicate command response from worker', {
workerId: fromWorkerId,
uids,
});
return undefined;
}
commandResults[fromWorkerId] = payload;
const commandResultsArray = Object.values(commandResults);
if (commandResultsArray.every(response => response !== null)) {
logger.debug('all workers responded to command', { uids });
const completeCommandResultsArray = <MarshalledResultObject[]> commandResultsArray;
const toWorkerId = uidsToWorkerId[uids];
const toWorker = cluster.workers?.[toWorkerId];
delete uidsToCommandResults[uids];
delete uidsToWorkerId[uids];
if (!toWorker) {
logger.warn('worker shut down while its command was executing', {
workerId: toWorkerId, uids,
});
return undefined;
}
// send back response to original worker
_dispatchCommandResultsToWorker(toWorker, uids, completeCommandResultsArray);
}
}
function _handlePrimaryMessage(
fromWorker: Worker,
message: RPCCommandMessage | RPCCommandResultMessage,
handlers?: PrimaryHandlersMap
): void {
const { type: messageType, uids } = message;
const logger = rpcLogger.newRequestLoggerFromSerializedUids(uids);
logger.debug('primary received message from worker', {
workerId: fromWorker?.id, rpcMessage: message,
});
if (messageType === 'cluster-rpc:command') {
return _handlePrimaryCommandMessage(fromWorker, logger, message, handlers);
}
if (messageType === 'cluster-rpc:commandResult') {
return _handlePrimaryCommandResultMessage(fromWorker?.id, logger, message);
}
logger.error('unsupported message type', {
workerId: fromWorker?.id, messageType, uids,
});
return undefined;
}
function _sendWorkerCommandResult(
uids: string,
error: Error | null | undefined,
result?: any
): void {
const message: RPCCommandResultMessage = {
type: 'cluster-rpc:commandResult',
uids,
payload: {
error: error ? error.message : null,
result,
},
};
process.send?.(message);
}
function _handleWorkerCommandMessage(
logger: any,
message: RPCCommandMessage,
handlers: HandlersMap
): void {
const { toHandler, uids, payload } = message;
const cb: HandlerCallback = (err, result) => _sendWorkerCommandResult(uids, err, result);
if (toHandler in handlers) {
return handlers[toHandler](payload, uids, cb);
}
logger.error('no such handler in "toHandler" field from worker command message', {
toHandler,
});
return cb(errors.NotImplemented);
}
function _handleWorkerCommandResultsMessage(
logger: any,
message: RPCCommandResultsMessage,
): void {
const { uids, payload } = message;
const { results } = payload;
const commandPromise: CommandPromise = uidsToCommandPromise[uids];
if (commandPromise === undefined) {
logger.error('missing promise for command results', { uids, payload });
return undefined;
}
if (commandPromise.timeout) {
clearTimeout(commandPromise.timeout);
}
delete uidsToCommandPromise[uids];
const unmarshalledResults = results.map(workerResult => {
let workerError: Error | null = null;
if (workerResult.error) {
if (workerResult.error in errors) {
workerError = errors[workerResult.error];
} else {
workerError = new Error(workerResult.error);
}
}
if (workerError && workerResult.errorCode) {
(workerError as Error & { code: number }).code = workerResult.errorCode;
}
const unmarshalledResult: ResultObject = {
error: workerError,
result: workerResult.result,
};
return unmarshalledResult;
});
return commandPromise.resolve(unmarshalledResults);
}
function _handleWorkerCommandErrorMessage(
logger: any,
message: RPCCommandErrorMessage,
): void {
const { uids, payload } = message;
const { error } = payload;
const commandPromise: CommandPromise = uidsToCommandPromise[uids];
if (commandPromise === undefined) {
logger.error('missing promise for command results', { uids, payload });
return undefined;
}
if (commandPromise.timeout) {
clearTimeout(commandPromise.timeout);
}
delete uidsToCommandPromise[uids];
let commandError: Error | null = null;
if (error in errors) {
commandError = errors[error];
} else {
commandError = new Error(error);
}
return commandPromise.reject(<Error> commandError);
}
function _handleWorkerMessage(
message: RPCCommandMessage | RPCCommandResultsMessage | RPCCommandErrorMessage,
handlers: HandlersMap
): void {
const { type: messageType, uids } = message;
const workerId = cluster.worker?.id;
const logger = rpcLogger.newRequestLoggerFromSerializedUids(uids);
logger.debug('worker received message from primary', {
workerId, rpcMessage: message,
});
if (messageType === 'cluster-rpc:command') {
return _handleWorkerCommandMessage(logger, message, handlers);
}
if (messageType === 'cluster-rpc:commandResults') {
return _handleWorkerCommandResultsMessage(logger, message);
}
if (messageType === 'cluster-rpc:commandError') {
return _handleWorkerCommandErrorMessage(logger, message);
}
logger.error('unsupported message type', {
workerId, messageType,
});
return undefined;
}
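The doc comment at the top of this module spells out the full command flow. A hypothetical end-to-end wiring, assuming the module path, the `countItems` handler name, and its payload (all illustrative, not part of the source):

```typescript
import cluster from 'cluster';
// Path and names below are assumptions for this sketch.
import { setupRPCPrimary, setupRPCWorker, sendWorkerCommand } from './ClusterRPC';

if (cluster.isPrimary) {
    setupRPCPrimary();
    cluster.fork();
    cluster.fork();
} else {
    setupRPCWorker({
        countItems: (payload: object, uids: string, callback) => {
            // each worker answers with its own result
            callback(null, { pid: process.pid, count: 0 });
        },
    });
    // any worker may fan a command out to all workers ("*") and await
    // the array of per-worker results, with the default 60s timeout
    sendWorkerCommand('*', 'countItems', `cmd-${process.pid}`, {})
        .then(results => console.log(results))
        .catch(err => console.error(err));
}
```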

View File

@ -171,7 +171,3 @@ export const maxCachedBuckets = process.env.METADATA_MAX_CACHED_BUCKETS ?
    Number(process.env.METADATA_MAX_CACHED_BUCKETS) : 1000;
export const validRestoreObjectTiers = new Set(['Expedited', 'Standard', 'Bulk']);
export const maxBatchingConcurrentOperations = 5;
/** For policy resource arn check we allow empty account ID to not break compatibility */
export const policyArnAllowedEmptyAccountId = ['utapi', 'scuba'];

View File

@ -148,7 +148,7 @@ export class IndexTransaction {
                'missing condition for conditional put'
            );
        }
        if (typeof condition.notExists !== 'string' && typeof condition.exists !== 'string') { if (typeof condition.notExists !== 'string') {
            throw propError(
                'unsupportedConditionalOperation',
                'missing key or supported condition'

View File

@ -690,11 +690,6 @@ export const ReportNotPresent: ErrorFormat = {
        'The request was rejected because the credential report does not exist. To generate a credential report, use GenerateCredentialReport.',
};
export const Found: ErrorFormat = {
    code: 302,
    description: 'Resource Found'
};
// ------------- Special non-AWS S3 errors -------------
export const MPUinProgress: ErrorFormat = {
@ -1042,15 +1037,3 @@ export const AuthMethodNotImplemented: ErrorFormat = {
    description: 'AuthMethodNotImplemented',
    code: 501,
};
// --------------------- quotaErrors ---------------------
export const NoSuchQuota: ErrorFormat = {
code: 404,
description: 'The specified resource does not have a quota.',
};
export const QuotaExceeded: ErrorFormat = {
code: 429,
description: 'The quota set for the resource is exceeded.',
};

View File

@ -1,5 +1,3 @@
import { RequestLogger } from 'werelogs';
import { legacyLocations } from '../constants';
import escapeForXml from '../s3middleware/escapeForXml';

View File

@ -101,7 +101,6 @@ export default class BucketInfo {
    _azureInfo: any | null;
    _ingestion: { status: 'enabled' | 'disabled' } | null;
    _capabilities?: Capabilities;
    _quotaMax: number | 0;
    /**
     * Represents all bucket information.
@ -158,7 +157,6 @@
     * @param [notificationConfiguration] - bucket notification configuration
     * @param [tags] - bucket tag set
     * @param [capabilities] - capabilities for the bucket
     * @param quotaMax - bucket quota
     */
    constructor(
        name: string,
@ -187,7 +185,6 @@
        notificationConfiguration?: any,
        tags?: Array<BucketTag> | [],
        capabilities?: Capabilities,
        quotaMax?: number | 0,
    ) {
        assert.strictEqual(typeof name, 'string');
        assert.strictEqual(typeof owner, 'string');
@ -288,10 +285,6 @@
            tags = [] as BucketTag[];
        }
        assert.strictEqual(areTagsValid(tags), true);
        if (quotaMax) {
            assert.strictEqual(typeof quotaMax, 'number');
            assert(quotaMax >= 0, 'Quota cannot be negative');
        }
        // IF UPDATING PROPERTIES, INCREMENT MODELVERSION NUMBER ABOVE
        this._acl = aclInstance;
@ -320,7 +313,6 @@
        this._notificationConfiguration = notificationConfiguration || null;
        this._tags = tags;
        this._capabilities = capabilities || undefined;
        this._quotaMax = quotaMax || 0;
        return this;
    }
@ -356,7 +348,6 @@ export default class BucketInfo {
            notificationConfiguration: this._notificationConfiguration,
            tags: this._tags,
            capabilities: this._capabilities,
            quotaMax: this._quotaMax,
        };
        const final = this._websiteConfiguration
            ? {
@ -383,7 +374,7 @@
            obj.bucketPolicy, obj.uid, obj.readLocationConstraint, obj.isNFS,
            obj.ingestion, obj.azureInfo, obj.objectLockEnabled,
            obj.objectLockConfiguration, obj.notificationConfiguration, obj.tags,
obj.capabilities, obj.quotaMax); obj.capabilities);
    }
    /**
@ -410,8 +401,7 @@
            data._bucketPolicy, data._uid, data._readLocationConstraint,
            data._isNFS, data._ingestion, data._azureInfo,
            data._objectLockEnabled, data._objectLockConfiguration,
data._notificationConfiguration, data._tags, data._capabilities, data._notificationConfiguration, data._tags, data._capabilities);
data._quotaMax);
    }
    /**
@ -949,22 +939,4 @@
        this._capabilities = capabilities;
        return this;
    }
    /**
     * Get the bucket quota information
     * @return quotaMax
     */
    getQuota() {
        return this._quotaMax;
    }
    /**
     * Set bucket quota
     * @param quota - quota to be set
     * @return - bucket quota info
     */
    setQuota(quota: number) {
        this._quotaMax = quota || 0;
        return this;
    }
}
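A short usage sketch for the quota accessors above (`bucket` stands in for any existing BucketInfo instance; the 5 GiB figure is arbitrary):

```typescript
declare const bucket: BucketInfo;

bucket.setQuota(5 * 1024 * 1024 * 1024); // 5 GiB, in bytes
bucket.getQuota();                       // => 5368709120
bucket.setQuota(0);                      // falls back to 0, the "no quota" default
```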

View File

@ -666,38 +666,13 @@ export default class LifecycleConfiguration {
 * @return Returns an error object or `null`
 */
_checkDate(date: string) {
const isoRegex = new RegExp( const isoRegex = new RegExp('^(-?(?:[1-9][0-9]*)?[0-9]{4})-' +
"^(-?(?:[1-9][0-9]*)?[0-9]{4})" + // Year '(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9])' +
"-(1[0-2]|0[1-9])" + // Month ':([0-5][0-9]):([0-5][0-9])(.[0-9]+)?(Z)?$');
"-(3[01]|0[1-9]|[12][0-9])" + // Day if (!isoRegex.test(date)) {
"T(2[0-3]|[01][0-9])" + // Hour
":([0-5][0-9])" + // Minute
":([0-5][0-9])" + // Second
"(\\.[0-9]+)?" + // Fractional second
"(Z|[+-][01][0-9]:[0-5][0-9])?$", // Timezone
"g"
);
const matches = [...date.matchAll(isoRegex)];
if (matches.length !== 1) {
            const msg = 'Date must be in ISO 8601 format';
            return errors.InvalidArgument.customizeDescription(msg);
        }
// Check for a timezone in the last match group. If none, add a Z to indicate UTC.
if (!matches[0][matches[0].length-1]) {
date += 'Z';
}
const dateObj = new Date(date);
if (Number.isNaN(dateObj.getTime())) {
const msg = 'Date is not a valid date';
return errors.InvalidArgument.customizeDescription(msg);
}
if (dateObj.getUTCHours() !== 0
|| dateObj.getUTCMinutes() !== 0
|| dateObj.getUTCSeconds() !== 0
|| dateObj.getUTCMilliseconds() !== 0) {
const msg = '\'Date\' must be at midnight GMT';
return errors.InvalidArgument.customizeDescription(msg);
}
        return null;
    }
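Worked examples for the stricter validation branch shown here (a sketch, assuming `_checkDate` returns `null` on success and an InvalidArgument error otherwise):

```typescript
// ISO 8601 and exactly midnight UTC: passes
'2024-01-01T00:00:00Z';
'2024-01-01T00:00:00.000+00:00';
// ISO 8601 but not midnight GMT: rejected with "'Date' must be at midnight GMT"
'2024-01-01T12:30:00Z';
// not ISO 8601 at all: rejected with 'Date must be in ISO 8601 format'
'01/01/2024';
```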

View File

@ -58,7 +58,6 @@ export type ObjectMDData = {
    'x-amz-server-side-encryption-customer-algorithm': string;
    'x-amz-website-redirect-location': string;
    'x-amz-scal-transition-in-progress'?: boolean;
    'x-amz-scal-transition-time'?: string;
    azureInfo?: any;
    acl: ACL;
    key: string;
@ -650,24 +649,10 @@ export default class ObjectMD {
 * Set metadata transition in progress value
 *
 * @param inProgress - True if transition is in progress, false otherwise
 * @param transitionTime - Date when the transition started
 * @return itself
 */
setTransitionInProgress(inProgress: false): this setTransitionInProgress(inProgress: boolean) {
setTransitionInProgress(inProgress: true, transitionTime: Date|string|number): this
setTransitionInProgress(inProgress: boolean, transitionTime?: Date|string|number) {
        this._data['x-amz-scal-transition-in-progress'] = inProgress;
        if (!inProgress || !transitionTime) {
            delete this._data['x-amz-scal-transition-time'];
        } else {
            if (typeof transitionTime === 'number') {
                transitionTime = new Date(transitionTime);
            }
            if (transitionTime instanceof Date) {
                transitionTime = transitionTime.toISOString();
            }
            this._data['x-amz-scal-transition-time'] = transitionTime;
        }
        return this;
    }
@ -680,14 +665,6 @@
        return this._data['x-amz-scal-transition-in-progress'];
    }
/**
* Gets the transition time of the object.
* @returns The transition time of the object.
*/
getTransitionTime() {
return this._data['x-amz-scal-transition-time'];
}
/**
 * Set access control list
 *
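A usage sketch for the transition-time bookkeeping above (`md` stands in for an ObjectMD instance); Date and epoch-millisecond inputs are normalized to an ISO string:

```typescript
declare const md: ObjectMD;

md.setTransitionInProgress(true, new Date('2023-04-06T08:00:00Z'));
md.getTransitionTime();            // => '2023-04-06T08:00:00.000Z'

md.setTransitionInProgress(false); // also deletes x-amz-scal-transition-time
md.getTransitionTime();            // => undefined
```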

View File

@ -1,8 +1,6 @@
import assert from 'assert';
import UUID from 'uuid';
import { RequestLogger } from 'werelogs';
import escapeForXml from '../s3middleware/escapeForXml';
import errors from '../errors';
import { isValidBucketName } from '../s3routes/routesUtils';

View File

@ -435,6 +435,7 @@ export default class Server {
this._server.on('connection', sock => {
    // Setting no delay of the socket to the value configured
    // TODO fix this
    // @ts-expect-error
    sock.setNoDelay(this.isNoDelay());
    sock.on('error', err => this._logger.info(
        'socket error - request rejected', { error: err }));

View File

@ -62,7 +62,7 @@ export default class HealthProbeServer extends httpServer {
_onLiveness(
    _req: http.IncomingMessage,
    res: http.ServerResponse,
    log: werelogs.RequestLogger, log: RequestLogger,
) {
    if (this._livenessCheck(log)) {
        sendSuccess(res, log);
@ -74,7 +74,7 @@ export default class HealthProbeServer extends httpServer {
_onReadiness(
    _req: http.IncomingMessage,
    res: http.ServerResponse,
    log: werelogs.RequestLogger, log: RequestLogger,
) {
    if (this._readinessCheck(log)) {
        sendSuccess(res, log);

View File

@ -16,7 +16,7 @@ export const DEFAULT_METRICS_ROUTE = '/metrics';
 * @param log - Werelogs instance for logging if you choose to
 */
export type ProbeDelegate = (res: http.ServerResponse, log: werelogs.RequestLogger) => string | void export type ProbeDelegate = (res: http.ServerResponse, log: RequestLogger) => string | void
export type ProbeServerParams = {
    port: number;

View File

@ -1,7 +1,4 @@
import * as http from 'http';
import { RequestLogger } from 'werelogs';
import { ArsenalError } from '../../errors';
/**

View File

@ -119,7 +119,7 @@ export default class RESTClient {
    method: string,
    headers: http.OutgoingHttpHeaders | null,
    key: string | null,
    log: werelogs.RequestLogger, log: RequestLogger,
    responseCb: (res: http.IncomingMessage) => void,
) {
    const reqHeaders = headers || {};

View File

@ -25,7 +25,7 @@ function setContentRange(
function sendError(
    res: http.ServerResponse,
    log: werelogs.RequestLogger, log: RequestLogger,
    error: ArsenalError,
    optMessage?: string,
) {
@ -141,7 +141,7 @@ export default class RESTServer extends httpServer {
_onPut(
    req: http.IncomingMessage,
    res: http.ServerResponse,
    log: werelogs.RequestLogger, log: RequestLogger,
) {
    let size: number;
    try {
@ -183,7 +183,7 @@ export default class RESTServer extends httpServer {
_onGet(
    req: http.IncomingMessage,
    res: http.ServerResponse,
    log: werelogs.RequestLogger, log: RequestLogger,
) {
    let pathInfo: ReturnType<typeof parseURL>;
    let rangeSpec: ReturnType<typeof httpUtils.parseRangeSpec> | undefined =
@ -266,7 +266,7 @@ export default class RESTServer extends httpServer {
_onDelete(
    req: http.IncomingMessage,
    res: http.ServerResponse,
    log: werelogs.RequestLogger, log: RequestLogger,
) {
    let pathInfo: ReturnType<typeof parseURL>;
    try {

View File

@ -1,6 +1,6 @@
import ioClient from 'socket.io-client';
import * as http from 'http';
import { Server as IOServer } from 'socket.io'; import io from 'socket.io';
import * as sioStream from './sio-stream';
import async from 'async';
import assert from 'assert';
@ -497,7 +497,7 @@ export function RPCServer(params: {
    assert(params.logger);
    const httpServer = http.createServer();
    const server = new IOServer(httpServer, { maxHttpBufferSize: 1e8 }); const server = io(httpServer);
    const log = params.logger;
/**
@ -508,7 +508,7 @@ export function RPCServer(params: {
 *
 * @param {BaseService} serviceList - list of services to register
 */
(server as any).registerServices = function registerServices(...serviceList: any[]) { server.registerServices = function registerServices(...serviceList: any[]) {
    serviceList.forEach(service => {
        const sock = this.of(service.namespace);
        sock.on('connection', conn => {
@ -536,7 +536,7 @@ export function RPCServer(params: {
    });
};
(server as any).listen = function listen(port, bindAddress = undefined) { server.listen = function listen(port, bindAddress = undefined) {
    httpServer.listen(port, bindAddress);
};

View File

@ -38,7 +38,7 @@
},
"principalAWSUserArn": {
    "type": "string",
    "pattern": "^arn:aws:iam::[0-9]{12}:user/(?!\\*)[\\w+=,.@ -/]{1,2017}$" "pattern": "^arn:aws:iam::[0-9]{12}:user/(?!\\*)[\\w+=,.@ -/]{1,64}$"
},
"principalAWSRoleArn": {
    "type": "string",
@ -360,9 +360,6 @@
    "type": "string",
    "const": "2012-10-17"
},
"Id": {
    "type": "string"
},
"Statement": {
    "oneOf": [
        {

View File

@ -28,7 +28,7 @@
}, },
"principalAWSUserArn": { "principalAWSUserArn": {
"type": "string", "type": "string",
"pattern": "^arn:aws:iam::[0-9]{12}:user/(?!\\*)[\\w+=,.@ -/]{1,2017}$" "pattern": "^arn:aws:iam::[0-9]{12}:user/(?!\\*)[\\w+=,.@ -/]{1,64}$"
}, },
"principalAWSRoleArn": { "principalAWSRoleArn": {
"type": "string", "type": "string",

View File

@ -12,39 +12,13 @@ import {
    actionMapSSO,
    actionMapSTS,
    actionMapMetadata,
    actionMapScuba,
} from './utils/actionMaps';
export const actionNeedQuotaCheck = { const _actionNeedQuotaCheck = {
    objectPut: true,
    objectPutVersion: true,
    objectPutPart: true,
    objectRestore: true,
};
/**
* This variable describes APIs that change the bytes
* stored, requiring quota updates
*/
export const actionWithDataDeletion = {
objectDelete: true,
objectDeleteVersion: true,
multipartDelete: true,
multiObjectDelete: true,
};
/**
* The function returns true if the current API call is a copy object
* and the action requires a quota evaluation logic, post retrieval
* of the object metadata.
* @param {string} action - the action being performed
* @param {string} currentApi - the current API being called
* @return {boolean} - whether the action requires a quota check
*/
export function actionNeedQuotaCheckCopy(action: string, currentApi: string) {
return action === 'objectGet' && (currentApi === 'objectCopy' || currentApi === 'objectPutCopyPart');
}
function _findAction(service: string, method: string) {
    switch (service) {
        case 's3':
@ -62,8 +36,6 @@ function _findAction(service: string, method: string) {
            return actionMapSTS[method];
        case 'metadata':
            return actionMapMetadata[method];
        case 'scuba':
            return actionMapScuba[method];
        default:
            return undefined;
    }
@ -133,10 +105,6 @@ function _buildArn(
        return `arn:scality:metadata::${requesterInfo!.accountid}:` +
            `${generalResource}/`;
    }
    case 'scuba': {
        return `arn:scality:scuba::${requesterInfo!.accountid}:` +
            `${generalResource}${specificResource ? '/' + specificResource : ''}`;
    }
    default:
        return undefined;
    }
@ -205,7 +173,6 @@ export default class RequestContext {
    _needTagEval: boolean;
    _foundAction?: string;
    _foundResource?: string;
_objectLockRetentionDays?: number | null;
constructor(
    headers: { [key: string]: string | string[] },
@ -227,7 +194,6 @@
    requestObjTags?: string,
    existingObjTag?: string,
    needTagEval?: false,
    objectLockRetentionDays?: number,
) {
    this._headers = headers;
    this._query = query;
@ -256,12 +222,10 @@
    this._securityToken = securityToken;
    this._policyArn = policyArn;
    this._action = action;
    this._needQuota = actionNeedQuotaCheck[apiMethod] === true this._needQuota = _actionNeedQuotaCheck[apiMethod] === true;
        || actionWithDataDeletion[apiMethod] === true;
    this._requestObjTags = requestObjTags || null;
    this._existingObjTag = existingObjTag || null;
    this._needTagEval = needTagEval || false;
    this._objectLockRetentionDays = objectLockRetentionDays || null;
    return this;
}
@ -293,7 +257,6 @@ export default class RequestContext {
    requestObjTags: this._requestObjTags,
    existingObjTag: this._existingObjTag,
    needTagEval: this._needTagEval,
    objectLockRetentionDays: this._objectLockRetentionDays,
};
return JSON.stringify(requestInfo);
}
@ -334,7 +297,6 @@
    obj.requestObjTags,
    obj.existingObjTag,
    obj.needTagEval,
    obj.objectLockRetentionDays,
);
}
@ -738,24 +700,4 @@
getNeedTagEval() {
    return this._needTagEval;
}
/**
* Get object lock retention days
*
* @returns objectLockRetentionDays - object lock retention days
*/
getObjectLockRetentionDays() {
return this._objectLockRetentionDays;
}
/**
* Set object lock retention days
*
* @param objectLockRetentionDays - object lock retention days
* @returns itself
*/
setObjectLockRetentionDays(objectLockRetentionDays: number) {
this._objectLockRetentionDays = objectLockRetentionDays;
return this;
}
}
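A minimal sketch of how the constructor above derives `_needQuota` from the two action maps (the helper and sample API methods are illustrative, assuming the maps are typed as `Record<string, boolean>`):

```typescript
const needsQuota = (apiMethod: string): boolean =>
    actionNeedQuotaCheck[apiMethod] === true
    || actionWithDataDeletion[apiMethod] === true;

needsQuota('objectPut');    // true  - adds bytes stored
needsQuota('objectDelete'); // true  - changes bytes stored
needsQuota('objectGet');    // false - read-only
```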

View File

@ -310,7 +310,6 @@ export function evaluatePolicy(
}
/**
 * @deprecated Upgrade to standardEvaluateAllPolicies
 * Evaluate whether a request is permitted under a policy.
 * @param requestContext - Info necessary to
 * evaluate permission
@ -326,16 +325,6 @@
    allPolicies: any[],
    log: Logger,
): string {
    return standardEvaluateAllPolicies(requestContext, allPolicies, log).verdict;
}
export function standardEvaluateAllPolicies(
    requestContext: RequestContext,
    allPolicies: any[],
    log: Logger,
): {
    verdict: string;
    isImplicit: boolean;
} {
    log.trace('evaluating all policies');
    let allow = false;
    let allowWithTagCondition = false;
@ -344,10 +333,7 @@ export function standardEvaluateAllPolicies(
        const singlePolicyVerdict = evaluatePolicy(requestContext, allPolicies[i], log);
        // If there is any Deny, just return Deny
        if (singlePolicyVerdict === 'Deny') {
            return { return 'Deny';
                verdict: 'Deny',
                isImplicit: false,
            };
        }
        if (singlePolicyVerdict === 'Allow') {
            allow = true;
@ -358,7 +344,6 @@ export function standardEvaluateAllPolicies(
        } // else 'Neutral'
    }
    let verdict;
    let isImplicit = false;
    if (allow) {
        if (denyWithTagCondition) {
            verdict = 'NeedTagConditionEval';
@ -370,9 +355,8 @@ export function standardEvaluateAllPolicies(
            verdict = 'NeedTagConditionEval';
        } else {
            verdict = 'Deny';
            isImplicit = true;
        }
    }
    log.trace('result of evaluating all policies', { verdict, isImplicit }); log.trace('result of evaluating all policies', { verdict });
    return { verdict, isImplicit }; return verdict;
}
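A sketch of consuming the richer verdict above: an explicit Deny (a Deny statement matched) can now be told apart from an implicit one (no statement matched at all), which the deprecated string-returning API could not express. The declared values are placeholders:

```typescript
declare const requestContext: RequestContext;
declare const allPolicies: any[];
declare const log: any; // a werelogs Logger in arsenal

const { verdict, isImplicit } = standardEvaluateAllPolicies(requestContext, allPolicies, log);
if (verdict === 'Deny' && isImplicit) {
    // nothing matched: a caller may fall through to another authorization
    // backend instead of treating this as a hard denial
}
```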

View File

@ -52,12 +52,6 @@ const sharedActionMap = {
    objectPutVersion: 's3:PutObjectVersion',
};
const actionMapBucketQuotas = {
    bucketGetQuota: 'scality:GetBucketQuota',
    bucketUpdateQuota: 'scality:UpdateBucketQuota',
    bucketDeleteQuota: 'scality:DeleteBucketQuota',
};
// action map used for request context
const actionMapRQ = {
    bucketPut: 's3:CreateBucket',
@ -71,7 +65,6 @@ const actionMapRQ = {
    initiateMultipartUpload: 's3:PutObject',
    objectDeleteVersion: 's3:DeleteObjectVersion',
    objectDeleteTaggingVersion: 's3:DeleteObjectVersionTagging',
    objectGetArchiveInfo: 'scality:GetObjectArchiveInfo',
    objectGetVersion: 's3:GetObjectVersion',
    objectGetACLVersion: 's3:GetObjectVersionAcl',
    objectGetTaggingVersion: 's3:GetObjectVersionTagging',
@ -86,11 +79,10 @@ const actionMapRQ = {
    objectPutLegalHoldVersion: 's3:PutObjectLegalHold',
    listObjectVersions: 's3:ListBucketVersions',
    ...sharedActionMap,
    ...actionMapBucketQuotas,
};
// action map used for bucket policies
const actionMapBP = actionMapRQ; const actionMapBP = { ...sharedActionMap };
// action map for all relevant s3 actions
const actionMapS3 = {
@ -159,15 +151,6 @@ const actionMonitoringMapS3 = {
    objectPutTagging: 'PutObjectTagging',
    objectRestore: 'RestoreObject',
    serviceGet: 'ListBuckets',
bucketGetQuota: 'GetBucketQuota',
bucketUpdateQuota: 'UpdateBucketQuota',
bucketDeleteQuota: 'DeleteBucketQuota',
};
const actionMapAccountQuotas = {
UpdateAccountQuota : 'scality:UpdateAccountQuota',
DeleteAccountQuota : 'scality:DeleteAccountQuota',
GetAccountQuota : 'scality:GetAccountQuota',
}; };
const actionMapIAM = {
@ -211,7 +194,6 @@ const actionMapIAM = {
    tagUser: 'iam:TagUser',
    unTagUser: 'iam:UntagUser',
    listUserTags: 'iam:ListUserTags',
    ...actionMapAccountQuotas,
};
const actionMapSSO = {
@ -227,14 +209,6 @@ const actionMapSSO = {
    default: 'metadata:bucketd',
};
const actionMapScuba = {
GetMetrics: 'scuba:GetMetrics',
AdminStartIngest: 'scuba:AdminStartIngest',
AdminStopIngest: 'scuba:AdminStopIngest',
AdminReadRaftCseq: 'scuba:AdminReadRaftCseq',
AdminTriggerRepair: 'scuba:AdminTriggerRepair',
};
export {
    actionMapRQ,
    actionMapBP,
@ -244,5 +218,4 @@ export {
    actionMapSSO,
    actionMapSTS,
    actionMapMetadata,
    actionMapScuba,
};

View File

@ -1,5 +1,5 @@
import { handleWildcardInResource } from './wildcards';
import { policyArnAllowedEmptyAccountId } from '../../constants';
/**
 * Checks whether an ARN from a request matches an ARN in a policy
 * to compare against each portion of the ARN from the request
@ -38,10 +38,9 @@
        const requestSegment = caseSensitive ? requestArnArr[j] :
            requestArnArr[j].toLowerCase();
        const policyArnArr = policyArn.split(':');
        // We want to allow an empty account ID for utapi and scuba service ARNs to not // We want to allow an empty account ID for utapi service ARNs to not
        // break compatibility.
        if (j === 4 && policyArnAllowedEmptyAccountId.includes(policyArnArr[2]) if (j === 4 && policyArnArr[2] === 'utapi' && policyArnArr[4] === '') {
            && policyArnArr[4] === '') {
            continue;
        } else if (!segmentRegEx.test(requestSegment)) {
            return false;
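Illustrative ARNs for the empty-account-ID exception above (the resource part is made up; only the service and account segments matter here):

```typescript
const policyArn = 'arn:scality:utapi:::listmetrics'; // hypothetical resource
const segments = policyArn.split(':');
segments[2]; // 'utapi' - a service allowed to omit the account ID
segments[4]; // ''      - the empty account ID segment that is tolerated
```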

View File

@ -168,9 +168,6 @@ export function findConditionKey(
        return requestContext.getNeedTagEval() && requestContext.getRequestObjTags()
            ? getTagKeys(requestContext.getRequestObjTags()!)
            : undefined;
    // The maximum retention period is 100 years.
    case 's3:object-lock-remaining-retention-days':
        return requestContext.getObjectLockRetentionDays() || undefined;
    default:
        return undefined;
}

View File

@ -2,9 +2,6 @@ import assert from 'assert';
import * as crypto from 'crypto';
import * as stream from 'stream';
import azure from '@azure/storage-blob';
import { RequestLogger } from 'werelogs';
import ResultsCollector from './ResultsCollector';
import SubStreamInterface from './SubStreamInterface';
import * as objectUtils from '../objectUtils';

View File

@ -1,25 +1,19 @@
import { scaleMsPerDay } from '../objectUtils'; const oneDay = 24 * 60 * 60 * 1000; // Milliseconds in a day.
const msInOneDay = 24 * 60 * 60 * 1000; // Milliseconds in a day.
export default class LifecycleDateTime {
    _transitionOneDayEarlier?: boolean;
    _expireOneDayEarlier?: boolean;
    _timeProgressionFactor?: number;
    _scaledMsPerDay: number;
    constructor(params?: {
        transitionOneDayEarlier: boolean;
        expireOneDayEarlier: boolean;
        timeProgressionFactor: number;
    }) {
        this._transitionOneDayEarlier = params?.transitionOneDayEarlier;
        this._expireOneDayEarlier = params?.expireOneDayEarlier;
        this._timeProgressionFactor = params?.timeProgressionFactor || 1;
        this._scaledMsPerDay = scaleMsPerDay(this._timeProgressionFactor);
    }
    getCurrentDate() {
        const timeTravel = this._expireOneDayEarlier ? msInOneDay : 0; const timeTravel = this._expireOneDayEarlier ? oneDay : 0;
        return Date.now() + timeTravel;
    }
@ -31,7 +25,7 @@ export default class LifecycleDateTime {
    findDaysSince(date: Date) {
        const now = this.getCurrentDate();
        const diff = now - date.getTime();
        return Math.floor(diff / this._scaledMsPerDay); return Math.floor(diff / (1000 * 60 * 60 * 24));
    }
    /**
@ -58,8 +52,8 @@ export default class LifecycleDateTime {
        }
        if (transition.Days !== undefined) {
            const lastModifiedTime = this.getTimestamp(lastModified);
            const timeTravel = this._transitionOneDayEarlier ? -msInOneDay : 0; const timeTravel = this._transitionOneDayEarlier ? -oneDay : 0;
            return lastModifiedTime + (transition.Days * this._scaledMsPerDay) + timeTravel; return lastModifiedTime + (transition.Days * oneDay) + timeTravel;
        }
    }
@ -75,8 +69,8 @@ export default class LifecycleDateTime {
    ) {
        if (transition.NoncurrentDays !== undefined) {
            const lastModifiedTime = this.getTimestamp(lastModified);
            const timeTravel = this._transitionOneDayEarlier ? -msInOneDay : 0; const timeTravel = this._transitionOneDayEarlier ? -oneDay : 0;
            return lastModifiedTime + (transition.NoncurrentDays * this._scaledMsPerDay) + timeTravel; return lastModifiedTime + (transition.NoncurrentDays * oneDay) + timeTravel;
        }
    }
}
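A sketch of the time-scaling knob above: with a `timeProgressionFactor` of 24, one lifecycle "day" elapses every real hour, which is what makes accelerated lifecycle testing possible:

```typescript
const dateTime = new LifecycleDateTime({
    transitionOneDayEarlier: false,
    expireOneDayEarlier: false,
    timeProgressionFactor: 24,
});
// _scaledMsPerDay = Math.round(86400000 / 24) = 3600000 ms, i.e. one hour,
// so findDaysSince() counts one elapsed "day" per real hour
```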

View File

@ -1,5 +1,3 @@
const msInOneDay = 24 * 60 * 60 * 1000; // Milliseconds in a day.
export const getMD5Buffer = (base64MD5: WithImplicitCoercion<string> | Uint8Array) =>
    base64MD5 instanceof Uint8Array ? base64MD5 : Buffer.from(base64MD5, 'base64')
@ -8,14 +6,3 @@ export const getHexMD5 = (base64MD5: WithImplicitCoercion<string> | Uint8Array)
export const getBase64MD5 = (hexMD5: WithImplicitCoercion<string>) =>
    Buffer.from(hexMD5, 'hex').toString('base64');
/**
* Calculates the number of scaled milliseconds per day based on the given time progression factor.
* This function is intended for testing and simulation purposes only.
* @param {number} timeProgressionFactor - The desired time progression factor for scaling.
* @returns {number} The number of scaled milliseconds per day.
* If the result is 0, the minimum value of 1 millisecond is returned.
*/
export const scaleMsPerDay = (timeProgressionFactor: number): number =>
Math.round(msInOneDay / (timeProgressionFactor || 1)) || 1;
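Worked values for `scaleMsPerDay` above:

```typescript
scaleMsPerDay(1);    // 86400000 - a real day
scaleMsPerDay(24);   // 3600000  - one "day" per hour
scaleMsPerDay(0);    // 86400000 - a falsy factor defaults to 1
scaleMsPerDay(1e12); // 1        - clamped to the 1 ms minimum
```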

View File

@ -1,7 +1,4 @@
import assert from 'assert';
import { RequestLogger } from 'werelogs';
import errors from '../errors';
import routeGET from './routes/routeGET';
import routePUT from './routes/routePUT';

View File

@ -1,5 +1,3 @@
import { RequestLogger } from 'werelogs';
import * as routesUtils from '../routesUtils';
import errors from '../../errors';
import StatsClient from '../../metrics/StatsClient';
@ -43,8 +41,6 @@ export default function routeDELETE(
    return call('bucketDeleteEncryption');
} else if (query?.tagging !== undefined) {
    return call('bucketDeleteTagging');
} else if (query?.quota !== undefined) {
    return call('bucketDeleteQuota');
}
call('bucketDelete');
} else {

View File

@ -1,5 +1,3 @@
import { RequestLogger } from 'werelogs';
import * as routesUtils from '../routesUtils';
import errors from '../../errors';
import * as http from 'http';
@ -60,8 +58,6 @@ export default function routerGET(
    call('bucketGetEncryption');
} else if (query.search !== undefined) {
    call('metadataSearch')
} else if (query.quota !== undefined) {
    call('bucketGetQuota');
} else {
    // GET bucket
    call('bucketGet');

View File

@ -1,5 +1,3 @@
import { RequestLogger } from 'werelogs';
import * as routesUtils from '../routesUtils';
import errors from '../../errors';
import StatsClient from '../../metrics/StatsClient';

View File

@ -1,5 +1,3 @@
import { RequestLogger } from 'werelogs';
import * as routesUtils from '../routesUtils';
import errors from '../../errors';
import * as http from 'http';

View File

@ -1,5 +1,3 @@
import { RequestLogger } from 'werelogs';
import * as routesUtils from '../routesUtils';
import errors from '../../errors';
import * as http from 'http';

View File

@ -1,5 +1,3 @@
import { RequestLogger } from 'werelogs';
import * as routesUtils from '../routesUtils';
import errors from '../../errors';
import * as http from 'http';
@ -105,13 +103,6 @@ export default function routePUT(
        return routesUtils.responseNoBody(err, corsHeaders,
            response, 200, log);
    });
} else if (query.quota !== undefined) {
    api.callApiMethod('bucketUpdateQuota', request, response,
        log, (err, resHeaders) => {
            routesUtils.statsReport500(err, statsClient);
            return routesUtils.responseNoBody(err, resHeaders, response,
                200, log);
        });
} else {
    // PUT bucket
    return api.callApiMethod('bucketPut', request, response, log,

View File

@ -1,5 +1,3 @@
import { RequestLogger } from 'werelogs';
import * as routesUtils from '../routesUtils';
import errors from '../../errors';
import * as http from 'http';
@ -29,11 +27,6 @@ export default function routerWebsite(
    routesUtils.statsReport500(err, statsClient);
    // request being redirected
    if (redirectInfo) {
        if (err && redirectInfo.withError) {
            return routesUtils.redirectRequestOnError(err,
                'GET', redirectInfo, dataGetInfo, dataRetrievalParams,
                response, resMetaHeaders, log)
        }
        // note that key might have been modified in websiteGet
        // api to add index document
        return routesUtils.redirectRequest(redirectInfo,
@ -64,11 +57,6 @@ export default function routerWebsite(
    (err, resMetaHeaders, redirectInfo, key) => {
        routesUtils.statsReport500(err, statsClient);
        if (redirectInfo) {
            if (err && redirectInfo.withError) {
                return routesUtils.redirectRequestOnError(err,
                    'HEAD', redirectInfo, null, dataRetrievalParams,
                    response, resMetaHeaders, log)
            }
            return routesUtils.redirectRequest(redirectInfo,
                // TODO ARSN-217 encrypted does not exist in request.connection
                // @ts-ignore

View File

@ -1,13 +1,10 @@
import * as url from 'url';
import * as http from 'http';
import { eachSeries } from 'async';
import { RequestLogger } from 'werelogs';
import * as ipCheck from '../ipCheck';
import errors, { ArsenalError } from '../errors';
import * as constants from '../constants';
import { eachSeries } from 'async';
import DataWrapper from '../storage/data/DataWrapper';
import * as http from 'http';
import StatsClient from '../metrics/StatsClient';
import { objectKeyByteLimit } from '../constants';
const jsutil = require('../jsutil');
@ -694,8 +691,6 @@ export function streamUserErrorPage(
    log: RequestLogger,
) {
    setCommonResponseHeaders(corsHeaders, response, log);
    response.setHeader('x-amz-error-code', err.message);
    response.setHeader('x-amz-error-message', err.description);
    response.writeHead(err.code, { 'Content-type': 'text/html' });
    response.on('finish', () => {
        // TODO ARSN-216 Fix logger
@ -878,7 +873,7 @@ export function redirectRequest(
    }
    let redirectLocation = justPath ? `/${redirectKey}` :
        `${redirectProtocol}://${redirectHostName}/${redirectKey}`;
    if (!redirectKey && redirectLocationHeader && redirectLocation !== '/') { if (!redirectKey && redirectLocationHeader) {
        // remove hanging slash
        redirectLocation = redirectLocation.slice(0, -1);
    }
@ -895,52 +890,6 @@
    return undefined;
}
/**
* redirectRequestOnError - redirect with an error body
* @param err - arsenal error object
* @param method - HTTP method
* @param routingInfo - info for routing
* @param [routingInfo.withError] - flag to differentiate from routing rules
* @param [routingInfo.location] - location header
* @param dataLocations --
* - array of locations to get streams from backend
* @param retrieveDataParams - params to create instance of
* data retrieval function
* @param response - response object
* @param corsHeaders - CORS-related response headers
* @param log - Werelogs instance
*/
export function redirectRequestOnError(
err: ArsenalError,
method: 'HEAD' | 'GET',
routingInfo: {
withError: true;
location: string;
},
dataLocations: { size: string | number }[] | null,
retrieveDataParams: any,
response: http.ServerResponse,
corsHeaders: { [key: string]: string },
log: RequestLogger,
) {
response.setHeader('Location', routingInfo.location);
if (!dataLocations && err.is.Found) {
if (method === 'HEAD') {
return errorHeaderResponse(err, response, corsHeaders, log);
}
response.setHeader('x-amz-error-code', err.message);
response.setHeader('x-amz-error-message', err.description);
return errorHtmlResponse(err, false, '', response, corsHeaders, log);
}
// This is reached only for website error document (GET only)
const overrideErrorCode = err.flatten();
overrideErrorCode.code = 301;
return streamUserErrorPage(ArsenalError.unflatten(overrideErrorCode)!,
dataLocations || [], retrieveDataParams, response, corsHeaders, log);
}
/**
* Get bucket name and object name from the request
* @param request - http request object


@@ -2,8 +2,6 @@ const async = require('async');
const PassThrough = require('stream').PassThrough;
const assert = require('assert');
const { Logger } = require('werelogs');
const errors = require('../../errors').default;
const MD5Sum = require('../../s3middleware/MD5Sum').default;
const NullStream = require('../../s3middleware/nullStream').default;
@@ -29,7 +27,6 @@ class DataWrapper {
this.metadata = metadata;
this.locStorageCheckFn = locStorageCheckFn;
this.vault = vault;
this.logger = new Logger('DataWrapper');
}
put(cipherBundle, value, valueSize, keyContext, backendInfo, log, cb) {
@@ -130,7 +127,7 @@ class DataWrapper {
}
delete(objectGetInfo, log, cb) {
const callback = cb || (() => {});
const callback = cb || log.end;
const isMdModelVersion2 = typeof(objectGetInfo) === 'string';
const isRequiredStringKey =
constants.clientsRequireStringKey[this.implName];
@@ -179,9 +176,7 @@ class DataWrapper {
newObjDataStoreName)) {
return process.nextTick(cb);
}
const delLog = this.logger.newRequestLoggerFromSerializedUids(
log.getSerializedUids());
delLog.trace('initiating batch delete', {
log.trace('initiating batch delete', {
keys: locations,
implName: this.implName,
method: 'batchDelete',
@@ -207,21 +202,21 @@ class DataWrapper {
return false;
});
if (shouldBatchDelete && keys.length > 1) {
return this.client.batchDelete(backendName, { keys }, delLog, cb);
return this.client.batchDelete(backendName, { keys }, log, cb);
}
return async.eachLimit(locations, 5, (loc, next) => {
process.nextTick(() => this.delete(loc, delLog, next));
process.nextTick(() => this.delete(loc, log, next));
},
err => {
if (err) {
delLog.end().error('batch delete failed', { error: err });
log.end().error('batch delete failed', { error: err });
// deletion of non-existing objects result in 204
if (err.code === 404) {
return cb();
}
return cb(err);
}
delLog.end().trace('batch delete successfully completed');
log.end().trace('batch delete successfully completed');
return cb();
});
}


@@ -1,10 +1,10 @@
const { http, https } = require('httpagent');
const url = require('url');
const AWS = require('aws-sdk');
const Sproxy = require('sproxydclient');
const Hyperdrive = require('hdclient');
const HttpsProxyAgent = require('https-proxy-agent');
require("aws-sdk/lib/maintenance_mode_message").suppress = true;
const constants = require('../../constants');
const DataFileBackend = require('./file/DataFileInterface');
const inMemory = require('./in_memory/datastore').backend;
@@ -25,13 +25,8 @@ function parseLC(config, vault) {
if (locationObj.type === 'file') {
clients[location] = new DataFileBackend(config);
}
if (locationObj.type === 'vitastor') {
const VitastorBackend = require('./vitastor/VitastorBackend');
clients[location] = new VitastorBackend(location, locationObj.details);
}
if (locationObj.type === 'scality') {
if (locationObj.details.connector.sproxyd) {
const Sproxy = require('sproxydclient');
clients[location] = new Sproxy({
bootstrap: locationObj.details.connector
.sproxyd.bootstrap,
@@ -46,7 +41,6 @@ function parseLC(config, vault) {
});
clients[location].clientType = 'scality';
} else if (locationObj.details.connector.hdclient) {
const Hyperdrive = require('hdclient');
clients[location] = new Hyperdrive.hdcontroller.HDProxydClient(
locationObj.details.connector.hdclient);
clients[location].clientType = 'scality';
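For contrast, the vitastor branch removed above consumes a location entry shaped roughly like this sketch; every value is illustrative, and only pool_id plus either metadata_image or metadata_pool_id/metadata_inode_num are actually validated by the VitastorBackend constructor shown later in this diff:

locationConfig['us-vitastor-1'] = {
    type: 'vitastor',
    details: {
        pool_id: 3,                  // illustrative pool number
        metadata_image: 'zenko-meta', // illustrative image name
        // remaining keys are passed straight through to the vitastor client
    },
};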


@@ -5,7 +5,6 @@ const { parseTagFromQuery } = require('../../s3middleware/tagging');
const { externalBackendHealthCheckInterval } = require('../../constants');
const DataFileBackend = require('./file/DataFileInterface');
const { createLogger, checkExternalBackend } = require('./external/utils');
const jsutil = require('../../jsutil');
class MultipleBackendGateway {
constructor(clients, metadata, locStorageCheckFn) {
@@ -200,12 +199,11 @@ class MultipleBackendGateway {
uploadPart(request, streamingV4Params, stream, size, location, key,
uploadId, partNumber, bucketName, log, cb) {
const client = this.clients[location];
const cbOnce = jsutil.once(cb);
if (client.uploadPart) {
return this.locStorageCheckFn(location, size, log, err => {
if (err) {
return cbOnce(err);
return cb(err);
}
return client.uploadPart(request, streamingV4Params, stream,
size, key, uploadId, partNumber, bucketName, log,
@@ -219,14 +217,14 @@ class MultipleBackendGateway {
'metric following object PUT failure',
{ error: error.message });
}
return cbOnce(err);
return cb(err);
});
}
return cbOnce(null, partInfo);
return cb(null, partInfo);
});
});
}
return cbOnce();
return cb();
}
listParts(key, uploadId, location, bucketName, partNumberMarker, maxParts,


@@ -8,7 +8,6 @@ const getMetaHeaders =
const { prepareStream } = require('../../../s3middleware/prepareStream');
const { createLogger, logHelper, removeQuotes, trimXMetaPrefix } =
require('./utils');
const jsutil = require('../../../jsutil');
const missingVerIdInternalError = errors.InternalError.customizeDescription(
'Invalid state. Please ensure versioning is enabled ' +
@@ -318,11 +317,9 @@ class AwsClient {
uploadPart(request, streamingV4Params, stream, size, key, uploadId,
partNumber, bucketName, log, callback) {
let hashedStream = stream;
const cbOnce = jsutil.once(callback);
if (request) {
const partStream = prepareStream(request, streamingV4Params,
this._vault, log, cbOnce);
this._vault, log, callback);
hashedStream = new MD5Sum();
partStream.pipe(hashedStream);
}
@@ -336,7 +333,7 @@ class AwsClient {
if (err) {
logHelper(log, 'error', 'err from data backend ' +
'on uploadPart', err, this._dataStoreName, this.clientType);
return cbOnce(errors.ServiceUnavailable
return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`),
);
@@ -350,7 +347,7 @@ class AwsClient {
dataStoreName: this._dataStoreName,
dataStoreETag: noQuotesETag,
};
return cbOnce(null, dataRetrievalInfo);
return callback(null, dataRetrievalInfo);
});
}
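One side of this hunk wraps the completion callback with jsutil.once() so that an error surfaced through prepareStream() and the backend's own response cannot both invoke it. A minimal sketch of that guard; the require path and the exact once() semantics are assumptions inferred from the usage above:

const jsutil = require('../../../jsutil');
const cbOnce = jsutil.once(callback);
cbOnce(errors.ServiceUnavailable); // delivered to callback
cbOnce(null, dataRetrievalInfo);   // assumed dropped: callback already ran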


@@ -1,696 +0,0 @@
// Zenko CloudServer Vitastor data storage backend adapter
// Copyright (c) Vitaliy Filippov, 2019+
// License: VNPL-1.1 (see README.md for details)
const stream = require('stream');
const vitastor = require('vitastor');
const VOLUME_MAGIC = 'VstS3Vol';
const OBJECT_MAGIC = 'VstS3Obj';
const FLAG_DELETED = 2n;
type Volume = {
id: number,
partial_sectors: {
[key: string]: {
buffer: Buffer,
refs: number,
},
},
header: {
location: string,
bucket: string,
max_size: number,
create_ts: number,
used_ts: number,
size: number,
objects: number,
removed_objects: number,
object_bytes: number,
removed_bytes: number,
},
};
type ObjectHeader = {
size: number,
key: string,
part_num?: number,
};
class VitastorBackend
{
locationName: string;
config: {
pool_id: number,
metadata_image: string,
metadata_pool_id: number,
metadata_inode_num: number,
size_buckets: number[],
size_bucket_mul: number,
id_batch_size: number,
sector_size: number,
write_chunk_size: number,
read_chunk_size: number,
pack_objects: boolean,
// and also other parameters for vitastor itself
};
next_id: number;
alloc_id: number;
opened: boolean;
on_open: ((...args: any[]) => void)[] | null;
open_error: Error | null;
cli: any;
kv: any;
volumes: {
[bucket: string]: {
[max_size: string]: Volume,
},
};
volumes_by_id: {
[id: string]: Volume,
};
volume_delete_stats: {
[id: string]: {
count: number,
bytes: number,
},
};
constructor(locationName, config)
{
this.locationName = locationName;
this.config = config;
// validate config
this.config.pool_id = Number(this.config.pool_id) || 0;
if (!this.config.pool_id)
throw new Error('pool_id is required for Vitastor');
if (!this.config.metadata_image && (!this.config.metadata_pool_id || !this.config.metadata_inode_num))
throw new Error('metadata_image or metadata_inode is required for Vitastor');
if (!this.config.size_buckets || !this.config.size_buckets.length)
this.config.size_buckets = [ 32*1024, 128*1024, 512*1024, 2*1024*1024, 8*1024*1024 ];
this.config.size_bucket_mul = Number(this.config.size_bucket_mul) || 2;
this.config.id_batch_size = Number(this.config.id_batch_size) || 100;
this.config.sector_size = Number(this.config.sector_size) || 0;
if (this.config.sector_size < 4096)
this.config.sector_size = 4096;
this.config.write_chunk_size = Number(this.config.write_chunk_size) || 0;
if (this.config.write_chunk_size < this.config.sector_size)
this.config.write_chunk_size = 4*1024*1024; // 4 MB
this.config.read_chunk_size = Number(this.config.read_chunk_size) || 0;
if (this.config.read_chunk_size < this.config.sector_size)
this.config.read_chunk_size = 4*1024*1024; // 4 MB
this.config.pack_objects = !!this.config.pack_objects;
// state
this.next_id = 1;
this.alloc_id = 0;
this.opened = false;
this.on_open = null;
this.open_error = null;
this.cli = new vitastor.Client(config);
this.kv = new vitastor.KV(this.cli);
// we group objects into volumes by bucket and size
this.volumes = {};
this.volumes_by_id = {};
this.volume_delete_stats = {};
}
async _makeVolumeId()
{
if (this.next_id <= this.alloc_id)
{
return this.next_id++;
}
const id_key = 'id'+this.config.pool_id;
const [ err, prev ] = await new Promise<[ any, string ]>(ok => this.kv.get(id_key, (err, value) => ok([ err, value ])));
if (err && err != vitastor.ENOENT)
{
throw new Error(err);
}
const new_id = (parseInt(prev) || 0) + 1;
this.next_id = new_id;
this.alloc_id = this.next_id + this.config.id_batch_size - 1;
await new Promise((ok, no) => this.kv.set(id_key, this.alloc_id, err => (err ? no(new Error(err)) : ok(null)), cas_old => cas_old === prev));
return this.next_id;
}
async _getVolume(bucketName, size)
{
if (!this.opened)
{
if (this.on_open)
{
await new Promise(ok => this.on_open!.push(ok));
}
else
{
this.on_open = [];
if (this.config.metadata_image)
{
const img = new vitastor.Image(this.cli, this.config.metadata_image);
const info = await new Promise<{ pool_id: number, inode_num: number }>(ok => img.get_info(ok));
this.config.metadata_pool_id = info.pool_id;
this.config.metadata_inode_num = info.inode_num;
}
const kv_config = {};
for (const key in this.config)
{
if (key.substr(0, 3) === 'kv_')
kv_config[key] = this.config[key];
}
this.open_error = await new Promise(ok => this.kv.open(
this.config.metadata_pool_id, this.config.metadata_inode_num,
kv_config, err => ok(err ? new Error(err) : null)
));
this.opened = true;
this.on_open.map(cb => setImmediate(cb));
this.on_open = null;
}
}
if (this.open_error)
{
throw this.open_error;
}
let i;
for (i = 0; i < this.config.size_buckets.length && size >= this.config.size_buckets[i]; i++) {}
let s;
if (i < this.config.size_buckets.length)
s = this.config.size_buckets[i];
else if (this.config.size_bucket_mul > 1)
{
// start from the largest configured bucket and grow it by the
// multiplier until the object fits
s = this.config.size_buckets[this.config.size_buckets.length-1];
while (size >= s)
s = Math.floor(this.config.size_bucket_mul * s);
}
if (!this.volumes[bucketName])
{
this.volumes[bucketName] = {};
}
if (this.volumes[bucketName][s])
{
return this.volumes[bucketName][s];
}
const new_id = await this._makeVolumeId();
const new_vol = this.volumes[bucketName][s] = {
id: new_id,
// FIXME: partial_sectors should be written with CAS because otherwise we may lose quick deletes
partial_sectors: {},
header: {
location: this.locationName,
bucket: bucketName,
max_size: s,
create_ts: Date.now(),
used_ts: Date.now(),
size: this.config.sector_size, // initial position is right after header
objects: 0,
removed_objects: 0,
object_bytes: 0,
removed_bytes: 0,
},
};
this.volumes_by_id[new_id] = new_vol;
const header_text = JSON.stringify(this.volumes[bucketName][s].header);
const buf = Buffer.alloc(this.config.sector_size);
buf.write(VOLUME_MAGIC + header_text, 0);
await new Promise((ok, no) => this.cli.write(
this.config.pool_id, new_id, 0, buf, err => (err ? no(new Error(err)) : ok(null))
));
await new Promise((ok, no) => this.kv.set(
'vol_'+this.config.pool_id+'_'+new_id, header_text, err => (err ? no(new Error(err)) : ok(null)), cas_old => !cas_old
));
return new_vol;
}
toObjectGetInfo(objectKey, bucketName, storageLocation)
{
return null;
}
_bufferStart(vol, cur_pos, cur_size, cur_chunks, sector_refs)
{
if ((cur_pos % this.config.sector_size) ||
Math.floor((cur_pos + cur_size) / this.config.sector_size) == Math.floor(cur_pos / this.config.sector_size))
{
const sect_pos = Math.floor(cur_pos / this.config.sector_size) * this.config.sector_size;
const sect = vol.partial_sectors[sect_pos]
? vol.partial_sectors[sect_pos].buffer
: Buffer.alloc(this.config.sector_size);
if (this.config.pack_objects)
{
// Save only if <pack_objects>
if (!vol.partial_sectors[sect_pos])
vol.partial_sectors[sect_pos] = { buffer: sect, refs: 0 };
vol.partial_sectors[sect_pos].refs++;
sector_refs.push(sect_pos);
}
let off = cur_pos % this.config.sector_size;
let i = 0;
for (; i < cur_chunks.length; i++)
{
let copy_len = this.config.sector_size - off;
copy_len = copy_len > cur_chunks[i].length ? cur_chunks[i].length : copy_len;
cur_chunks[i].copy(sect, off, 0, copy_len);
off += copy_len;
if (copy_len < cur_chunks[i].length)
{
cur_chunks[i] = cur_chunks[i].slice(copy_len);
cur_size -= copy_len;
break;
}
else
cur_size -= cur_chunks[i].length;
}
cur_chunks.splice(0, i, sect);
cur_size += this.config.sector_size;
cur_pos = sect_pos;
}
return [ cur_pos, cur_size ];
}
_bufferEnd(vol, cur_pos, cur_size, cur_chunks, sector_refs, write_all)
{
const write_pos = cur_pos;
const write_chunks = cur_chunks;
let write_size = cur_size;
cur_chunks = [];
cur_pos += cur_size;
cur_size = 0;
let remain = (cur_pos % this.config.sector_size);
if (remain > 0)
{
cur_pos -= remain;
let last_sect = null;
if (write_all)
{
last_sect = vol.partial_sectors[cur_pos]
? vol.partial_sectors[cur_pos].buffer
: Buffer.alloc(this.config.sector_size);
if (this.config.pack_objects)
{
// Save only if <pack_objects>
if (!vol.partial_sectors[cur_pos])
vol.partial_sectors[cur_pos] = { buffer: last_sect, refs: 0 };
vol.partial_sectors[cur_pos].refs++;
sector_refs.push(cur_pos);
}
}
write_size -= remain;
if (write_size < 0)
write_size = 0;
for (let i = write_chunks.length-1; i >= 0 && remain > 0; i--)
{
if (write_chunks[i].length <= remain)
{
remain -= write_chunks[i].length;
if (write_all)
write_chunks[i].copy(last_sect, remain);
else
cur_chunks.unshift(write_chunks[i]);
write_chunks.pop();
}
else
{
if (write_all)
write_chunks[i].copy(last_sect, 0, write_chunks[i].length - remain);
else
cur_chunks.unshift(write_chunks[i].slice(write_chunks[i].length - remain));
write_chunks[i] = write_chunks[i].slice(0, write_chunks[i].length - remain);
remain = 0;
i++;
}
}
if (write_all)
{
write_chunks.push(last_sect);
write_size += this.config.sector_size;
}
}
for (const chunk of cur_chunks)
{
cur_size += chunk.length;
}
return [ write_pos, write_chunks, write_size, cur_pos, cur_size, cur_chunks ];
}
/**
* reqUids: string, // request-ids for log, usually joined by ':'
* keyContext: {
* // a lot of shit, basically all metadata
* bucketName,
* objectKey,
* owner?,
* namespace?,
* partNumber?,
* uploadId?,
* metaHeaders?,
* isDeleteMarker?,
* tagging?,
* contentType?,
* cacheControl?,
* contentDisposition?,
* contentEncoding?,
* },
* callback: (error, objectGetInfo: any) => void,
*/
put(stream, size, keyContext, reqUids, callback)
{
callback = once(callback);
this._getVolume(keyContext.bucketName, size)
.then(vol => this._put(vol, stream, size, keyContext, reqUids, callback))
.catch(callback);
}
_put(vol, stream, size, keyContext, reqUids, callback)
{
const object_header: ObjectHeader = {
size,
key: keyContext.objectKey,
};
if (keyContext.partNumber)
{
object_header.part_num = keyContext.partNumber;
}
// header is: <8 bytes magic> <8 bytes flags> <8 bytes json length> <json>
const hdr_begin_buf = Buffer.alloc(24);
const hdr_json_buf = Buffer.from(JSON.stringify(object_header), 'utf-8');
hdr_begin_buf.write(OBJECT_MAGIC);
hdr_begin_buf.writeBigInt64LE(BigInt(hdr_json_buf.length), 16);
const object_header_buf = Buffer.concat([ hdr_begin_buf, hdr_json_buf ]);
const object_pos = vol.header.size;
const object_get_info = { volume: vol.id, offset: object_pos, hdrlen: object_header_buf.length, size };
let cur_pos = object_pos;
let cur_chunks = [ object_header_buf ];
let cur_size = object_header_buf.length;
let err: Error|null = null;
let waiting = 1; // 1 for end or error, 1 for each write request
vol.header.size += object_header_buf.length + size;
if (!this.config.pack_objects && (vol.header.size % this.config.sector_size))
{
vol.header.size += this.config.sector_size - (vol.header.size % this.config.sector_size);
}
const writeChunk = (last) =>
{
const sector_refs = [];
// Handle partial beginning
[ cur_pos, cur_size ] = this._bufferStart(vol, cur_pos, cur_size, cur_chunks, sector_refs);
// Handle partial end
let write_pos, write_chunks, write_size;
[ write_pos, write_chunks, write_size, cur_pos, cur_size, cur_chunks ] = this._bufferEnd(vol, cur_pos, cur_size, cur_chunks, sector_refs, last);
waiting++;
// FIXME: pool_id: maybe it should be stored in volume metadata to allow to migrate volumes?
this.cli.write(this.config.pool_id, vol.id, write_pos, write_chunks, (res) =>
{
for (const sect of sector_refs)
{
vol.partial_sectors[sect].refs--;
if (!vol.partial_sectors[sect].refs &&
vol.header.size >= sect+this.config.sector_size)
{
// Forget partial data when it's not needed anymore
delete(vol.partial_sectors[sect]);
}
}
waiting--;
if (res)
{
err = new Error(res);
waiting--;
}
if (!waiting)
{
callback(err, err ? null : object_get_info);
}
});
};
// Stream data
stream.on('error', (e) =>
{
err = e;
waiting--;
if (!waiting)
{
callback(err, null);
}
});
stream.on('end', () =>
{
if (err)
{
return;
}
waiting--;
if (cur_size)
{
// write last chunk
writeChunk(true);
}
if (!waiting)
{
callback(null, object_get_info);
}
});
stream.on('data', (chunk) =>
{
if (err)
{
return;
}
cur_chunks.push(chunk);
cur_size += chunk.length;
if (cur_size >= this.config.write_chunk_size)
{
// got a complete chunk, write it out
writeChunk(false);
}
});
}
/**
* objectGetInfo: {
* key: { volume, offset, hdrlen, size }, // from put
* size,
* start,
* dataStoreName,
* dataStoreETag,
* range,
* response: ServerResponse,
* },
* range?: [ start, end ], // like in HTTP - first byte index, last byte index
* callback: (error, readStream) => void,
*/
get(objectGetInfo, range, reqUids, callback)
{
if (!(objectGetInfo instanceof Object) || !objectGetInfo.key ||
!(objectGetInfo.key instanceof Object) || !objectGetInfo.key.volume ||
!objectGetInfo.key.offset || !objectGetInfo.key.hdrlen || !objectGetInfo.key.size)
{
throw new Error('objectGetInfo must be { key: { volume, offset, hdrlen, size } }, but is '+JSON.stringify(objectGetInfo));
}
const [ start, end ] = range || [];
if (start < 0 || end < 0 || end != null && start != null && end < start || start >= objectGetInfo.key.size)
{
throw new Error('Invalid range: '+start+'-'+end);
}
let offset = objectGetInfo.key.offset + objectGetInfo.key.hdrlen + (start || 0);
let len = objectGetInfo.key.size - (start || 0);
if (end)
{
const len2 = end - (start || 0) + 1;
if (len2 < len)
len = len2;
}
callback(null, new VitastorReadStream(this.cli, objectGetInfo.key.volume, offset, len, this.config));
}
/**
* objectGetInfo: {
* key: { volume, offset, hdrlen, size }, // from put
* size,
* start,
* dataStoreName,
* dataStoreETag,
* range,
* response: ServerResponse,
* },
* callback: (error) => void,
*/
delete(objectGetInfo, reqUids, callback)
{
callback = once(callback);
this._delete(objectGetInfo, reqUids)
.then(callback)
.catch(callback);
}
async _delete(objectGetInfo, reqUids)
{
if (!(objectGetInfo instanceof Object) || !objectGetInfo.key ||
!(objectGetInfo.key instanceof Object) || !objectGetInfo.key.volume ||
!objectGetInfo.key.offset || !objectGetInfo.key.hdrlen || !objectGetInfo.key.size)
{
throw new Error('objectGetInfo must be { key: { volume, offset, hdrlen, size } }, but is '+JSON.stringify(objectGetInfo));
}
const in_sect_pos = (objectGetInfo.key.offset % this.config.sector_size);
const sect_pos = objectGetInfo.key.offset - in_sect_pos;
const vol = this.volumes_by_id[objectGetInfo.key.volume];
if (vol && vol.partial_sectors[sect_pos])
{
// The sector may still be written to in corner cases
const sect = vol.partial_sectors[sect_pos];
const flags = sect.buffer.readBigInt64LE(in_sect_pos + 8);
if (!(flags & FLAG_DELETED))
{
const del_stat = this.volume_delete_stats[vol.id] = (this.volume_delete_stats[vol.id] || { count: 0, bytes: 0 });
del_stat.count++;
del_stat.bytes += objectGetInfo.key.size;
sect.buffer.writeBigInt64LE(flags | FLAG_DELETED, in_sect_pos + 8);
sect.refs++;
const err = await new Promise<any>(ok => this.cli.write(this.config.pool_id, objectGetInfo.key.volume, sect_pos, sect.buffer, ok));
sect.refs--;
if (err)
{
sect.buffer.writeBigInt64LE(0n, in_sect_pos + 8);
throw new Error(err);
}
}
}
else
{
// RMW with CAS
const [ err, buf, version ] = await new Promise<[ any, Buffer, bigint ]>(ok => this.cli.read(
this.config.pool_id, objectGetInfo.key.volume, sect_pos, this.config.sector_size,
(err, buf, version) => ok([ err, buf, version ])
));
if (err)
{
throw new Error(err);
}
// FIXME What if JSON crosses sector boundary? Prevent it if we want to pack objects
const magic = buf.slice(in_sect_pos, in_sect_pos+8).toString();
const flags = buf.readBigInt64LE(in_sect_pos+8);
const json_len = Number(buf.readBigInt64LE(in_sect_pos+16));
let json_hdr;
if (in_sect_pos+24+json_len <= buf.length)
{
try
{
json_hdr = JSON.parse(buf.slice(in_sect_pos+24, in_sect_pos+24+json_len).toString());
}
catch (e)
{
}
}
if (magic !== OBJECT_MAGIC || !json_hdr || json_hdr.size !== objectGetInfo.key.size)
{
throw new Error(
'header of object with size '+objectGetInfo.key.size+
' bytes not found in volume '+objectGetInfo.key.volume+' at '+objectGetInfo.key.offset
);
}
else if (!(flags & FLAG_DELETED))
{
buf.writeBigInt64LE(flags | FLAG_DELETED, in_sect_pos + 8);
const err = await new Promise<any>(ok => this.cli.write(this.config.pool_id, objectGetInfo.key.volume, sect_pos, buf, { version: version+1n }, ok));
if (err == vitastor.EINTR)
{
// Retry
await this._delete(objectGetInfo, reqUids);
}
else if (err)
{
throw new Error(err);
}
else
{
// FIXME: Write deletion statistics to volumes
// FIXME: Implement defragmentation
const del_stat = this.volume_delete_stats[objectGetInfo.key.volume] = (this.volume_delete_stats[objectGetInfo.key.volume] || { count: 0, bytes: 0 });
del_stat.count++;
del_stat.bytes += objectGetInfo.key.size;
}
}
}
}
/**
* config: full zenko server config,
* callback: (error, stats) => void, // stats is the returned statistics in arbitrary format
*/
getDiskUsage(config, reqUids, callback)
{
// FIXME: Iterate all volumes and return its sizes and deletion statistics, or maybe just sizes
callback(null, {});
}
}
class VitastorReadStream extends stream.Readable
{
constructor(cli, volume_id, offset, len, config, options = undefined)
{
super(options);
this.cli = cli;
this.volume_id = volume_id;
this.offset = offset;
this.end = offset + len;
this.pos = offset;
this.config = config;
this._reading = false;
}
_read(n)
{
if (this._reading)
{
return;
}
// FIXME: Validate object header
const chunk_size = n && this.config.read_chunk_size < n ? n : this.config.read_chunk_size;
const read_offset = this.pos;
const round_offset = read_offset - (read_offset % this.config.sector_size);
let read_end = this.end <= read_offset+chunk_size ? this.end : read_offset+chunk_size;
const round_end = (read_end % this.config.sector_size)
? read_end + this.config.sector_size - (read_end % this.config.sector_size)
: read_end;
if (round_end <= this.end)
read_end = round_end;
this.pos = read_end;
if (read_end <= read_offset)
{
// EOF
this.push(null);
return;
}
this._reading = true;
this.cli.read(this.config.pool_id, this.volume_id, round_offset, round_end-round_offset, (err, buf, version) =>
{
this._reading = false;
if (err)
{
this.destroy(new Error(err));
return;
}
if (read_offset != round_offset || round_end != read_end)
{
buf = buf.subarray(read_offset-round_offset, buf.length-(round_end-read_end));
}
if (this.push(buf))
{
this._read(n);
}
});
}
}
function once(callback)
{
let called = false;
return function()
{
if (!called)
{
called = true;
callback.apply(null, arguments);
}
};
}
module.exports = VitastorBackend;
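For readers following the volume layout above, here is a sketch of decoding one object header from a raw buffer, mirroring the <8 bytes magic> <8 bytes flags> <8 bytes json length> <json> layout that _put() writes and _delete() re-parses; the function itself is not part of the backend:

// decode an object header at byte offset `pos` of buffer `buf`
function decodeObjectHeader(buf, pos)
{
    const OBJECT_MAGIC = 'VstS3Obj'; // as defined above
    const magic = buf.slice(pos, pos+8).toString();
    if (magic !== OBJECT_MAGIC)
        throw new Error('no object header at offset '+pos);
    const flags = buf.readBigInt64LE(pos+8);
    const json_len = Number(buf.readBigInt64LE(pos+16));
    const header = JSON.parse(buf.slice(pos+24, pos+24+json_len).toString());
    return { header, flags, data_offset: pos+24+json_len };
}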


@@ -226,19 +226,6 @@ class MetadataWrapper {
});
}
getBucketQuota(bucketName, log, cb) {
log.debug('getting bucket quota from metadata');
this.client.getBucketAttributes(bucketName, log, (err, data) => {
if (err) {
log.debug('error from metadata', { implName: this.implName,
error: err });
return cb(err);
}
const bucketInfo = BucketInfo.fromObj(data);
return cb(err, { quota: bucketInfo.getQuota() });
});
}
deleteBucket(bucketName, log, cb) {
log.debug('deleting bucket from metadata');
this.client.deleteBucket(bucketName, log, err => {
@@ -292,25 +279,6 @@ class MetadataWrapper {
});
}
getObjectsMD(bucketName, objNamesWithParams, log, cb) {
if (typeof this.client.getObjects !== 'function') {
log.debug('backend does not support get object metadata with batching', {
implName: this.implName,
});
return cb(errors.NotImplemented);
}
log.debug('getting objects from metadata', { objects: objNamesWithParams });
return this.client.getObjects(bucketName, objNamesWithParams, log, (err, data) => {
if (err) {
log.debug('error getting objects from metadata', { implName: this.implName, objects: objNamesWithParams,
err });
return cb(err);
}
log.debug('objects retrieved from metadata', { objects: objNamesWithParams });
return cb(err, data);
});
}
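A sketch of driving the capability check above; backends without a getObjects() implementation surface errors.NotImplemented, which a caller can detect through the error's .is map (bucket and key names are illustrative):

metadata.getObjectsMD('my-bucket', [
    { key: 'a.txt', params: {} },
    { key: 'b.txt', params: {} },
], log, (err, data) => {
    if (err && err.is.NotImplemented) {
        // fall back to sequential getObjectMD() calls per key
    }
});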
getObjectMD(bucketName, objName, params, log, cb) {
log.debug('getting object from metadata');
this.client.getObject(bucketName, objName, params, log, (err, data) => {
@@ -324,7 +292,7 @@ class MetadataWrapper {
});
}
deleteObjectMD(bucketName, objName, params, log, cb, originOp = 's3:ObjectRemoved:Delete') {
deleteObjectMD(bucketName, objName, params, log, cb) {
log.debug('deleting object from metadata');
this.client.deleteObject(bucketName, objName, params, log, err => {
if (err) {
@@ -334,7 +302,7 @@ class MetadataWrapper {
}
log.debug('object deleted from metadata');
return cb(err);
}, originOp);
});
}
listObject(bucketName, listingParams, log, cb) {
@@ -548,139 +516,6 @@ class MetadataWrapper {
return cb();
});
}
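The originOp parameter in the richer signature above threads an event name down to the metadata backend so that oplog consumers can tell deletion causes apart. A hypothetical lifecycle-driven call; the event name and in-scope identifiers are illustrative:

metadata.deleteObjectMD('my-bucket', 'photo.jpg', { versionId }, log,
    err => { /* ... */ }, 's3:LifecycleExpiration:Delete');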
/**
* Put bucket indexes
*
* indexSpec format:
* [
* { key:[ { key: "", order: 1 } ... ], name: <id 1>, ... , < backend options> },
* ...
* { key:[ { key: "", order: 1 } ... ], name: <id n>, ... },
* ]
*
*
* @param {String} bucketName bucket name
* @param {Array<Object>} indexSpecs index specification
* @param {Object} log logger
* @param {Function} cb callback
* @return {undefined}
*/
putBucketIndexes(bucketName, indexSpecs, log, cb) {
log.debug('put bucket indexes');
if (typeof this.client.putBucketIndexes !== 'function') {
log.error('error from metadata', {
method: 'putBucketIndexes',
error: errors.NotImplemented,
implName: this.implName,
});
return cb(errors.NotImplemented);
}
return this.client.putBucketIndexes(bucketName, indexSpecs, log, err => {
if (err) {
log.debug('error from metadata', {
method: 'putBucketIndexes',
error: err,
implName: this.implName,
});
return cb(err);
}
return cb(null);
});
}
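A hypothetical invocation, following the indexSpec format documented above; the index name and key path are illustrative only:

metadata.putBucketIndexes('my-bucket', [
    { name: 'by-last-modified', key: [{ key: 'value.last-modified', order: 1 }] },
], log, err => {
    // err is errors.NotImplemented when the backend lacks index support
});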
/**
* Delete bucket indexes
*
* indexSpec format:
* [
* { key:[ { key: "", order: 1 } ... ], name: <id 1>, ... , < backend options> },
* ...
* { key:[ { key: "", order: 1 } ... ], name: <id n>, ... },
* ]
*
*
* @param {String} bucketName bucket name
* @param {Array<Object>} indexSpecs index specification
* @param {Object} log logger
* @param {Function} cb callback
* @return {undefined}
*/
deleteBucketIndexes(bucketName, indexSpecs, log, cb) {
log.debug('delete bucket indexes');
if (typeof this.client.deleteBucketIndexes !== 'function') {
log.error('error from metadata', {
method: 'deleteBucketIndexes',
error: errors.NotImplemented,
implName: this.implName,
});
return cb(errors.NotImplemented);
}
return this.client.deleteBucketIndexes(bucketName, indexSpecs, log, err => {
if (err) {
log.error('error from metadata', {
method: 'deleteBucketIndexes',
error: err,
implName: this.implName,
});
return cb(err);
}
return cb(null);
});
}
getBucketIndexes(bucketName, log, cb) {
log.debug('get bucket indexes');
if (typeof this.client.getBucketIndexes !== 'function') {
log.debug('error from metadata', {
method: 'getBucketIndexes',
error: errors.NotImplemented,
implName: this.implName,
});
return cb(errors.NotImplemented);
}
return this.client.getBucketIndexes(bucketName, log, (err, res) => {
if (err) {
log.debug('error from metadata', {
method: 'getBucketIndexes',
error: err,
implName: this.implName,
});
return cb(err);
}
return cb(null, res);
});
}
getIndexingJobs(log, cb) {
if (typeof this.client.getIndexingJobs !== 'function') {
log.debug('error from metadata', {
method: 'getIndexingJobs',
error: errors.NotImplemented,
implName: this.implName,
});
return cb(errors.NotImplemented);
}
return this.client.getIndexingJobs(log, (err, res) => {
if (err) {
log.debug('error from metadata', {
method: 'getBucketIndexes',
error: err,
implName: this.implName,
});
return cb(err);
}
return cb(null, res);
});
}
}
module.exports = MetadataWrapper;


@@ -1,5 +1,6 @@
const assert = require('assert');
const errors = require('../../../errors').default;
const BucketInfo = require('../../../models/BucketInfo').default;
class BucketClientInterface {
@@ -111,14 +112,7 @@ class BucketClientInterface {
}
listLifecycleObject(bucketName, params, log, cb) {
this.client.listObject(bucketName, log.getSerializedUids(), params,
(err, data) => {
if (err) {
return cb(err);
}
return cb(null, JSON.parse(data));
});
return null;
return process.nextTick(cb, errors.NotImplemented);
}
listMultipartUploads(bucketName, params, log, cb) {


@@ -326,7 +326,7 @@ class BucketFileInterface {
}
listLifecycleObject(bucketName, params, log, cb) {
return this.internalListObject(bucketName, params, log, cb);
return process.nextTick(cb, errors.NotImplemented);
}
listMultipartUploads(bucketName, params, log, cb) {


@@ -189,7 +189,7 @@ class MongoClientInterface {
if (err) {
this.logger.fatal('error writing usersBucket ' +
'attributes to metastore',
{ error: err });
throw (errors.InternalError);
}
return cb();
@@ -265,11 +265,14 @@ class MongoClientInterface {
shardCollection: `${this.database}.${bucketName}`,
key: { _id: 1 },
};
return this.adminDb.command(cmd, {}).then(() => cb()).catch(err => {
log.error(
'createBucket: enabling sharding',
{ error: err });
return cb(errors.InternalError);
});
return this.adminDb.command(cmd, {}, err => {
if (err) {
log.error(
'createBucket: enabling sharding',
{ error: err });
return cb(errors.InternalError);
}
return cb();
});
});
}
return cb();
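Stripped of error handling, the sharding setup above boils down to one admin command, issued via the promise API on one side of the diff and the callback API on the other. A sketch; database and bucket names are illustrative, and adminDb, cb, and errors are assumed in scope:

const cmd = {
    shardCollection: 'metadata.my-bucket',
    key: { _id: 1 }, // range-shard each bucket's collection on the object key
};
adminDb.command(cmd, {})
    .then(() => cb())
    .catch(() => cb(errors.InternalError));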
@@ -899,130 +902,35 @@ class MongoClientInterface {
return cb(errors.InternalError);
});
}
/**
* Puts an object into a MongoDB collection.
* Depending on the parameters, the object is either directly put into the collection
* or the existing object is marked as deleted and a new object is inserted.
*
* @param {Object} collection - The MongoDB collection to put the object into.
* @param {string} bucketName - The name of the bucket the object belongs to.
* @param {string} objName - The name of the object.
* @param {Object} value - The value of the object.
* @param {Object} params - Additional parameters.
* @param {string} params.vFormat - object key format.
* @param {boolean} params.needOplogUpdate - If true, the object is directly put into the collection
* with updating the operation log.
* @param {Object} log - The logger to use.
* @param {Function} cb - The callback function to call when the operation is complete. It is called with an error
* if there is an issue with the operation.
* @returns {Promise} A promise that resolves when the operation is complete. The promise is rejected with an error
* if there is an issue with the operation.
*/
putObjectNoVer(collection, bucketName, objName, value, params, log, cb) {
if (params?.needOplogUpdate) {
return this.putObjectNoVerWithOplogUpdate(collection, bucketName, objName, value, params, log, cb);
}
const key = formatMasterKey(objName, params.vFormat);
const putFilter = { _id: key };
return collection.updateOne(putFilter, {
$set: {
_id: key,
value,
},
}, {
upsert: true,
}).then(() => cb()).catch(err => {
log.error('putObjectNoVer: error putting object with no versioning', { error: err.message });
return cb(errors.InternalError);
});
}
/**
* Put object when versioning is not enabled
* @param {Object} c bucket collection
* @param {String} bucketName bucket name
* @param {String} objName object name
* @param {Object} objVal object metadata
* @param {Object} params params
* @param {Object} log logger
* @param {Function} cb callback
* @return {undefined}
*/
putObjectNoVer(c, bucketName, objName, objVal, params, log, cb) {
const masterKey = formatMasterKey(objName, params.vFormat);
c.updateOne({
_id: masterKey,
}, {
$set: {
_id: masterKey,
value: objVal,
},
}, {
upsert: true,
}).then(() => cb()).catch((err) => {
log.error('putObjectNoVer: error putting object with no versioning', { error: err.message });
return cb(errors.InternalError);
});
}
/**
* Updates an object in a MongoDB collection without changing its version.
* If the object doesn't exist, it will be created (upsert is true for the second update operation).
* The operation is logged in the oplog.
*
* @param {Object} collection - The MongoDB collection to update the object in.
* @param {string} bucketName - The name of the bucket the object belongs to.
* @param {string} objName - The name of the object.
* @param {Object} value - The new value of the object.
* @param {Object} params - Additional parameters.
* @param {string} params.vFormat - object key format
* @param {string} params.originOp - origin operation
* @param {Object} log - The logger to use.
* @param {Function} cb - The callback function to call when the operation is complete.
* It is called with an error if there is an issue with the operation.
* @returns {void}
*/
putObjectNoVerWithOplogUpdate(collection, bucketName, objName, value, params, log, cb) {
const key = formatMasterKey(objName, params.vFormat);
const putFilter = { _id: key };
// filter used when finding and updating object
const findFilter = {
...putFilter,
$or: [
{ 'value.deleted': { $exists: false } },
{ 'value.deleted': { $eq: false } },
],
};
const updateDeleteFilter = {
...putFilter,
'value.deleted': true,
};
return async.waterfall([
// Adding delete flag when getting the object
// to avoid having race conditions.
next => collection.findOneAndUpdate(findFilter, {
$set: updateDeleteFilter,
}, {
upsert: false,
}).then(doc => {
if (!doc.value) {
log.error('internalPutObject: unable to find target object to update',
{ bucket: bucketName, object: key });
return next(errors.NoSuchKey);
}
const obj = doc.value;
const objMetadata = new ObjectMD(obj.value);
objMetadata.setOriginOp(params.originOp);
objMetadata.setDeleted(true);
return next(null, objMetadata.getValue());
}).catch(err => {
log.error('internalPutObject: error getting object',
{ bucket: bucketName, object: key, error: err.message });
return next(errors.InternalError);
}),
// We update the full object to get the whole object metadata
// in the oplog update event
(objMetadata, next) => collection.bulkWrite([
{
updateOne: {
filter: updateDeleteFilter,
update: {
$set: { _id: key, value: objMetadata },
},
upsert: false,
},
},
{
updateOne: {
filter: putFilter,
update: {
$set: { _id: key, value },
},
upsert: true,
},
},
], { ordered: true }).then(() => next(null)).catch(next),
], (err) => {
if (err) {
log.error('internalPutObject: error updating object',
{ bucket: bucketName, object: key, error: err.message });
return cb(errors.InternalError);
}
return cb();
});
}
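Tying the two methods above together: a caller that needs the flag-then-reinsert dance for oplog consumers only has to set one parameter. A sketch, with every identifier other than putObjectNoVer assumed to be in scope:

mongoClient.putObjectNoVer(collection, 'my-bucket', 'photo.jpg', objVal,
    { vFormat, needOplogUpdate: true, originOp: 's3:ObjectCreated:Put' },
    log, err => {
        // on success the oplog carries the full previous metadata with
        // value.deleted === true, followed by the new put
    });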
/**
* Returns the putObjectVerCase function to use
* depending on params
@@ -1068,7 +976,8 @@ class MongoClientInterface {
return putObjectVer(c, bucketName, objName, objVal, _params, log,
cb);
}
return this.putObjectNoVer(c, bucketName, objName, objVal, _params, log, cb);
return this.putObjectNoVer(c, bucketName, objName, objVal,
_params, log, cb);
});
}
@@ -1135,86 +1044,6 @@ class MongoClientInterface {
], cb);
}
/**
* gets object metadata for a list of objects
* @param {String} bucketName bucket name
* @param {Array} objects array of objects
* @param {Object} log logger
* @param {Function} callback callback
* @return {undefined}
*/
getObjects(bucketName, objects, log, callback) {
const c = this.getCollection(bucketName);
let vFormat = null;
if (!Array.isArray(objects)) {
return callback(errors.InternalError.customizeDescription('objects must be an array'));
}
// We do not accept more than 1000 keys in a single request
if (objects.length > 1000) {
return callback(errors.InternalError.customizeDescription('cannot get more than 1000 objects'));
}
// Function to process each document
const processDoc = (doc, objName, params, key, cb) => {
const versionIdValue = params && params.versionId ? params.versionId : undefined;
if (!doc && versionIdValue) {
// If no document and a version ID is provided, return an error.
return cb(null, {
err: errors.NoSuchKey,
doc: null,
versionId: versionIdValue,
key,
});
}
// If no master found then object is either non existent or last
// version is delete marker
if (!doc || doc.value.isPHD) {
return this.getLatestVersion(c, objName, vFormat, log, (err, _doc) => cb(null, {
err,
doc: _doc || null,
versionId: versionIdValue,
key,
}));
}
MongoUtils.unserialize(doc.value);
return cb(null, {
err: null,
doc: doc.value,
versionId: versionIdValue,
key,
});
};
return this.getBucketVFormat(bucketName, log, (err, _vFormat) => {
if (err) {
return callback(err);
}
vFormat = _vFormat;
const keys = objects.map(({ key: objName, params }) => (params && params.versionId
? formatVersionKey(objName, params.versionId, vFormat)
: formatMasterKey(objName, vFormat)));
return c.find({
_id: { $in: keys },
$or: [
{ 'value.deleted': { $exists: false } },
{ 'value.deleted': { $eq: false } },
],
}).toArray().then(docs => {
// Create a Map to quickly find docs by their keys
const docByKey = new Map(docs.map(doc => [doc._id, doc]));
// Process each document using associated context (objName, params)
async.mapLimit(objects, constants.maxBatchingConcurrentOperations,
({ key: objName, params }, cb) => {
const key = params && params.versionId
? formatVersionKey(objName, params.versionId, vFormat)
: formatMasterKey(objName, vFormat);
const doc = docByKey.get(key);
processDoc(doc, objName, params, key, cb);
}, callback);
}).catch(err => {
callback(err);
});
});
}
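A sketch of a batched lookup against the method above; each entry mirrors the { key, params } shape destructured in the code, and at most 1000 entries are accepted per call:

mongoClient.getObjects('my-bucket', [
    { key: 'a.txt', params: {} },
    { key: 'b.txt', params: { versionId } }, // versionId assumed in scope
], log, (err, results) => {
    // each result is { err, doc, versionId, key }; a missing master with no
    // explicit versionId falls back to getLatestVersion() internally
});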
/**
* This function returns the latest version of an object
* by getting all keys related to an object's versions, ordering them
@@ -1373,7 +1202,7 @@ class MongoClientInterface {
'value.isPHD': true,
'value.versionId': mst.versionId,
};
this.internalDeleteObject(c, bucketName, masterKey, filter, null, log, err => {
this.internalDeleteObject(c, bucketName, masterKey, filter, log, err => {
if (err) {
// the PHD master might get updated when a PUT is performed
// before the repair is done, we don't want to return an error
@@ -1414,10 +1243,9 @@ class MongoClientInterface {
* @param {String} params.vFormat object key format
* @param {Object} log logger
* @param {Function} cb callback
* @param {String} [originOp=s3:ObjectRemoved:Delete] origin operation
* @return {undefined}
*/
deleteObjectVerMaster(c, bucketName, objName, params, log, cb, originOp = 's3:ObjectRemoved:Delete') {
deleteObjectVerMaster(c, bucketName, objName, params, log, cb) {
const masterKey = formatMasterKey(objName, params.vFormat);
const versionKey = formatVersionKey(objName, params.versionId, params.vFormat);
const _vid = generateVersionId(this.replicationGroupId);
@@ -1444,7 +1272,7 @@ class MongoClientInterface {
.then(() => next())
.catch(err => next(err)),
// delete version
next => this.internalDeleteObject(c, bucketName, versionKey, {}, params, log,
next => this.internalDeleteObject(c, bucketName, versionKey, {}, log,
err => {
// we don't return an error in case we don't find
// a version as we expect this case when dealing with
@@ -1453,7 +1281,7 @@ class MongoClientInterface {
return next(null);
}
return next(err);
}, originOp),
}),
], err => {
if (err) {
log.error(
@@ -1476,12 +1304,11 @@ class MongoClientInterface {
* @param {String} params.vFormat object key format
* @param {Object} log logger
* @param {Function} cb callback
* @param {String} [originOp=s3:ObjectRemoved:Delete] origin operation
* @return {undefined}
*/
deleteObjectVerNotMaster(c, bucketName, objName, params, log, cb, originOp = 's3:ObjectRemoved:Delete') {
deleteObjectVerNotMaster(c, bucketName, objName, params, log, cb) {
const versionKey = formatVersionKey(objName, params.versionId, params.vFormat);
this.internalDeleteObject(c, bucketName, versionKey, {}, params, log, err => {
this.internalDeleteObject(c, bucketName, versionKey, {}, log, err => {
if (err) {
if (err.is.NoSuchKey) {
log.error(
@@ -1495,7 +1322,7 @@ class MongoClientInterface {
return cb(errors.InternalError);
}
return cb(null);
}, originOp);
});
}
/**
@@ -1511,10 +1338,9 @@ class MongoClientInterface {
* @param {String} params.vFormat object key format
* @param {Object} log logger
* @param {Function} cb callback
* @param {String} [originOp=s3:ObjectRemoved:Delete] origin operation
* @return {undefined}
*/
deleteObjectVer(c, bucketName, objName, params, log, cb, originOp = 's3:ObjectRemoved:Delete') {
deleteObjectVer(c, bucketName, objName, params, log, cb) {
const masterKey = formatMasterKey(objName, params.vFormat);
async.waterfall([
next => {
@@ -1550,10 +1376,10 @@ class MongoClientInterface {
if (mst.value.isPHD ||
mst.value.versionId === params.versionId) {
return this.deleteObjectVerMaster(c, bucketName, objName,
params, log, next, originOp);
params, log, next);
}
return this.deleteObjectVerNotMaster(c, bucketName, objName,
params, log, next, originOp);
params, log, next);
},
], cb);
}
@@ -1567,12 +1393,11 @@ class MongoClientInterface {
* @param {String} params.vFormat object key format
* @param {Object} log logger
* @param {Function} cb callback
* @param {String} [originOp=s3:ObjectRemoved:Delete] origin operation
* @return {undefined}
*/
deleteObjectNoVer(c, bucketName, objName, params, log, cb, originOp = 's3:ObjectRemoved:Delete') {
deleteObjectNoVer(c, bucketName, objName, params, log, cb) {
const masterKey = formatMasterKey(objName, params.vFormat);
this.internalDeleteObject(c, bucketName, masterKey, {}, params, log, err => {
this.internalDeleteObject(c, bucketName, masterKey, {}, log, err => {
if (err) {
// Should not return an error when no object is found
if (err.is.NoSuchKey) {
@@ -1584,7 +1409,7 @@ class MongoClientInterface {
return cb(errors.InternalError);
}
return cb(null);
}, originOp);
});
}
/**
@@ -1595,30 +1420,12 @@ class MongoClientInterface {
* @param {string} bucketName bucket name
* @param {string} key Key of the object to delete
* @param {object} filter additional query filters
* @param {object} params request params
* @param {Logger} log logger instance
* @param {Logger}log logger instance
* @param {Function} cb callback containing error
* and BulkWriteResult
* @param {String} [originOp=s3:ObjectRemoved:Delete] origin operation
* @return {undefined}
*/
internalDeleteObject(collection, bucketName, key, filter, params, log, cb, originOp = 's3:ObjectRemoved:Delete') {
internalDeleteObject(collection, bucketName, key, filter, log, cb) {
// filter used when deleting object
const deleteFilter = Object.assign({
_id: key,
}, filter);
if (params && params.doesNotNeedOpogUpdate) {
// If flag is true, directly delete object
return collection.deleteOne(deleteFilter)
.then(() => cb(null))
.catch(err => {
log.error('internalDeleteObject: error deleting object',
{ bucket: bucketName, object: key, error: err.message });
return cb(errors.InternalError);
});
}
// filter used when finding and updating object
const findFilter = Object.assign({
_id: key,
@@ -1627,12 +1434,12 @@ class MongoClientInterface {
{ 'value.deleted': { $eq: false } },
],
}, filter);
// filter used when deleting object
const updateDeleteFilter = Object.assign({
'_id': key,
'value.deleted': true,
}, filter);
return async.waterfall([
async.waterfall([
// Adding delete flag when getting the object
// to avoid having race conditions.
next => collection.findOneAndUpdate(findFilter, {
@@ -1650,7 +1457,7 @@ class MongoClientInterface {
}
const obj = doc.value;
const objMetadata = new ObjectMD(obj.value);
objMetadata.setOriginOp(originOp);
objMetadata.setOriginOp('s3:ObjectRemoved:Delete');
objMetadata.setDeleted(true);
return next(null, objMetadata.getValue());
}).catch(err => {
@@ -1696,10 +1503,9 @@ class MongoClientInterface {
* @param {String} params.versionId object version (optional)
* @param {Object} log logger
* @param {Function} cb callback
* @param {String} [originOp=s3:ObjectRemoved:Delete] origin operation
* @return {undefined}
*/
deleteObject(bucketName, objName, params, log, cb, originOp = 's3:ObjectRemoved:Delete') {
deleteObject(bucketName, objName, params, log, cb) {
const c = this.getCollection(bucketName);
const _params = Object.assign({}, params);
return this.getBucketVFormat(bucketName, log, (err, vFormat) => {
@@ -1709,10 +1515,10 @@ class MongoClientInterface {
_params.vFormat = vFormat;
if (_params && _params.versionId) {
return this.deleteObjectVer(c, bucketName, objName,
_params, log, cb, originOp);
_params, log, cb);
}
return this.deleteObjectNoVer(c, bucketName, objName,
_params, log, cb, originOp);
_params, log, cb);
});
}
@@ -1734,13 +1540,10 @@ class MongoClientInterface {
internalListObject(bucketName, params, extension, vFormat, log, cb) {
const c = this.getCollection(bucketName);
const getLatestVersion = this.getLatestVersion;
const cbOnce = jsutil.once(cb);
let stream;
let baseStream;
if (!params.secondaryStreamParams) {
// listing masters only (DelimiterMaster)
stream = new MongoReadStream(c, params.mainStreamParams, params.mongifiedSearch);
baseStream = stream;
if (vFormat === BUCKET_VERSIONS.v1) {
/**
* When listing masters only in v1 we can't just skip PHD
@ -1782,22 +1585,6 @@ class MongoClientInterface {
}, },
}); });
stream = stream.pipe(resolvePhdKey); stream = stream.pipe(resolvePhdKey);
// Propagate the 'end' event from resolvePhdKey to stream
// to properly cleanup resources.
resolvePhdKey.on('end', () => {
baseStream.emit('end');
});
baseStream.on('error', err => {
const logObj = {
rawError: err,
error: err.message,
errorStack: err.stack,
};
log.error(
'internalListObjectV1: error listing objects', logObj);
baseStream.destroy();
return cbOnce(err);
});
}
} else {
// listing both master and version keys (delimiterVersion Algo)
@@ -1812,6 +1599,7 @@ class MongoClientInterface {
extension,
gte: gteParams,
});
const cbOnce = jsutil.once(cb);
skip.setListingEndCb(() => {
stream.emit('end');
stream.destroy();
@@ -1848,14 +1636,10 @@ class MongoClientInterface {
};
log.error(
'internalListObjectV1: error listing objects', logObj);
// call explicitly the destroy method to clean the mongodb cursor
stream.destroy();
cbOnce(err);
})
.on('end', () => {
const data = extension.result();
// call explicitly the destroy method to clean the mongodb cursor
stream.destroy();
cbOnce(null, data);
});
return undefined;
@@ -1915,14 +1699,19 @@ class MongoClientInterface {
return cb(err);
}
if (vFormat !== BUCKET_VERSIONS.v1) {
log.error('not supported bucket format version',
{ method: 'listLifecycleObject', bucket: bucketName, vFormat });
return cb(errors.NotImplemented.customizeDescription('Not supported bucket format version'));
}
const extName = params.listingType;
const extension = new listAlgos[extName](params, log, vFormat);
const extensionParams = extension.genMDParams();
const mainStreamParams = extension.genMDParams();
const internalParams = {
mainStreamParams: Array.isArray(extensionParams) ? extensionParams[0] : extensionParams,
secondaryStreamParams: Array.isArray(extensionParams) ? extensionParams[1] : null,
mainStreamParams,
};
return this.internalListObject(bucketName, internalParams, extension, vFormat, log, cb);
@ -2132,12 +1921,12 @@ class MongoClientInterface {
bucketInfos, bucketInfos,
}); });
})).catch(err => { })).catch(err => {
log.error('could not get list of collections', { log.error('could not get list of collections', {
method: '_getBucketInfos', method: '_getBucketInfos',
error: err, error: err,
});
return cb(err);
}); });
return cb(err);
});
} }
countItems(log, cb) { countItems(log, cb) {
@ -2150,20 +1939,14 @@ class MongoClientInterface {
return cb(err); return cb(err);
} }
const { bucketCount, bucketInfos } = res; const { bucketCount, bucketInfos } = res;
let bucketWithQuotaCount = 0;
const retBucketInfos = bucketInfos.map(bucket => { const retBucketInfos = bucketInfos.map(bucket => ({
if (bucket.getQuota()) { name: bucket.getName(),
bucketWithQuotaCount++; location: bucket.getLocationConstraint(),
} isVersioned: !!bucket.getVersioningConfiguration(),
return { ownerCanonicalId: bucket.getOwner(),
name: bucket.getName(), ingestion: bucket.isIngestionBucket(),
location: bucket.getLocationConstraint(), }));
isVersioned: !!bucket.getVersioningConfiguration(),
ownerCanonicalId: bucket.getOwner(),
ingestion: bucket.isIngestionBucket(),
};
});
return this.readCountItems(log, (err, results) => { return this.readCountItems(log, (err, results) => {
if (err) { if (err) {
@ -2173,7 +1956,6 @@ class MongoClientInterface {
/* eslint-disable */ /* eslint-disable */
results.bucketList = retBucketInfos; results.bucketList = retBucketInfos;
results.buckets = bucketCount; results.buckets = bucketCount;
results.bucketWithQuotaCount = bucketWithQuotaCount;
/* eslint-enable */ /* eslint-enable */
return cb(null, results); return cb(null, results);
}); });
@ -2236,12 +2018,13 @@ class MongoClientInterface {
store.buckets = bucketCount; store.buckets = bucketCount;
store.bucketList = retBucketInfos; store.bucketList = retBucketInfos;
console.log('before eachLimit');
return async.eachLimit(bucketInfos, this.concurrentCursors, return async.eachLimit(bucketInfos, this.concurrentCursors,
(bucketInfo, done) => { (bucketInfo, done) => {
async.waterfall([ async.waterfall([
next => this._getIsTransient(bucketInfo, log, next), next => this._getIsTransient(bucketInfo, log, next),
(isTransient, next) => { (isTransient, next) => {
console.log('in _getIsTransient eachLimit');
const bucketName = bucketInfo.getName(); const bucketName = bucketInfo.getName();
this.getObjectMDStats(bucketName, bucketInfo, this.getObjectMDStats(bucketName, bucketInfo,
isTransient, log, next); isTransient, log, next);
@ -2262,8 +2045,10 @@ class MongoClientInterface {
if (err) { if (err) {
return cb(err); return cb(err);
} }
console.log('before updateCountItems');
// save to infostore // save to infostore
return this.updateCountItems(store, log, err => { return this.updateCountItems(store, log, err => {
console.log('after updateCountItems');
if (err) { if (err) {
log.error('error saving count items in mongo', { log.error('error saving count items in mongo', {
method: 'scanItemCount', method: 'scanItemCount',
@ -2458,10 +2243,12 @@ class MongoClientInterface {
let stalledCount = 0; let stalledCount = 0;
const cmpDate = new Date(); const cmpDate = new Date();
cmpDate.setHours(cmpDate.getHours() - 1); cmpDate.setHours(cmpDate.getHours() - 1);
console.log('before cursor forEach');
cursor.forEach( cursor.forEach(
res => { res => {
const { data, error } = this._processEntryData(res, isTransient); const { data, error } = this._processEntryData(res, isTransient);
console.log('entry of cursor', data, error);
if (error) { if (error) {
log.error('Failed to process entry data', { log.error('Failed to process entry data', {
method: 'getObjectMDStats', method: 'getObjectMDStats',
@ -2498,13 +2285,17 @@ class MongoClientInterface {
collRes[targetData][site] = data[site]; collRes[targetData][site] = data[site];
} }
}); });
}).then(() => { }
).then(() => {
const bucketStatus = bucketInfo.getVersioningConfiguration(); const bucketStatus = bucketInfo.getVersioningConfiguration();
const isVer = (bucketStatus && const isVer = (bucketStatus &&
(bucketStatus.Status === 'Enabled' || (bucketStatus.Status === 'Enabled' ||
bucketStatus.Status === 'Suspended')); bucketStatus.Status === 'Suspended'));
console.log('before _handleResults');
const retResult = this._handleResults(collRes, isVer); const retResult = this._handleResults(collRes, isVer);
cursor.close();
retResult.stalled = stalledCount; retResult.stalled = stalledCount;
console.log('after _handleResults');
return callback(null, retResult); return callback(null, retResult);
}).catch(err => { }).catch(err => {
log.error('Error when processing mongo entries', { log.error('Error when processing mongo entries', {
@ -2559,7 +2350,7 @@ class MongoClientInterface {
}); });
return cb(errors.InternalError); return cb(errors.InternalError);
} }
return this.internalDeleteObject(c, bucketName, masterKey, filter, null, log, return this.internalDeleteObject(c, bucketName, masterKey, filter, log,
err => { err => {
if (err) { if (err) {
// unable to find an object that matches the conditions // unable to find an object that matches the conditions
@ -2640,108 +2431,6 @@ class MongoClientInterface {
}); });
}); });
} }
/**
* Puts bucket indexes
* @param {String} bucketName bucket name
* @param {Array<Object>} indexSpecs index specification
* @param {Object} log logger
* @param {Function} cb callback
* @return {undefined}
*/
putBucketIndexes(bucketName, indexSpecs, log, cb) {
const c = this.getCollection(bucketName);
const indexes = MongoUtils.indexFormatObjectToMongoArray(indexSpecs);
c.createIndexes(indexes).then(() => cb(null)).catch(err => {
if (err.codeName === 'NamespaceNotFound') {
return cb(errors.NoSuchBucket);
}
log.error(
'putBucketIndexes: error creating bucket indexes',
{ error: err });
return cb(errors.InternalError);
});
}
/**
* Delete bucket indexes
* @param {String} bucketName bucket name
* @param {Array<Object>} indexSpecs index specification
* @param {Object} log logger
* @param {Function} cb callback
* @return {undefined}
*/
deleteBucketIndexes(bucketName, indexSpecs, log, cb) {
const c = this.getCollection(bucketName);
async.each(indexSpecs,
(spec, next) => c.dropIndex(spec.name).then(() => next()).catch(err => next(err)),
err => {
if (err) {
if (err.codeName === 'NamespaceNotFound') {
return cb(errors.NoSuchBucket);
}
log.error(
'deleteBucketIndexes: error deleting bucket indexes',
{ error: err });
return cb(errors.InternalError);
}
return cb(null);
});
}
/**
* Gets bucket indexes
* @param {String} bucketName bucket name
* @param {Object} log logger
* @param {Function} cb callback
* @return {undefined}
*/
getBucketIndexes(bucketName, log, cb) {
const c = this.getCollection(bucketName);
c.listIndexes()
.toArray()
.then(res => cb(null, MongoUtils.indexFormatMongoArrayToObject(res)))
.catch(err => {
if (err.codeName === 'NamespaceNotFound') {
return cb(errors.NoSuchBucket);
}
log.error('getBucketIndexes: error retrieving bucket indexes', {
error: err,
});
return cb(errors.InternalError);
});
}
getIndexingJobs(log, cb) {
// list active createIndexes jobs
this.adminDb.command({
currentOp: true,
$or: [
{ 'op': 'command', 'command.createIndexes': { $exists: true } },
{ op: 'none', msg: /^Index Build/ },
],
}).then(res => {
const jobs = [];
for (const j of res.inprog) {
jobs.push({
bucket: j.command.createIndexes,
indexes: MongoUtils.indexFormatMongoArrayToObject(j.command.indexes),
});
}
return cb(null, jobs);
})
.catch(err => {
log.error('getIndexingJobs: error retrieving current index jobs', {
error: err,
});
return cb(err);
});
}
} }
module.exports = MongoClientInterface; module.exports = MongoClientInterface;
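
For context on the index management API removed above, a hypothetical call with illustrative bucket and index names, matching the spec format that indexFormatObjectToMongoArray() consumes:

const indexSpecs = [{
    name: 'lastModified_1', // index name passed through to MongoDB
    keys: [{ key: 'value.last-modified', order: 1 }],
}];
client.putBucketIndexes('example-bucket', indexSpecs, log, err => {
    // err is NoSuchBucket if the collection is missing,
    // InternalError on any other driver failure
});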

View File

@ -63,14 +63,6 @@ class MongoReadStream extends Readable {
} }
} }
if (options.dataStoreName) {
query['value.dataStoreName'] = {};
if (options.dataStoreName.ne) {
query['value.dataStoreName'].$ne = options.dataStoreName.ne;
}
}
if (!Object.keys(query._id).length) { if (!Object.keys(query._id).length) {
delete query._id; delete query._id;
} }
@ -85,8 +77,7 @@ class MongoReadStream extends Readable {
Object.assign(query, searchOptions); Object.assign(query, searchOptions);
} }
const projection = { 'value.location': 0 }; this._cursor = c.find(query).sort({
this._cursor = c.find(query, { projection }).sort({
_id: options.reverse ? -1 : 1, _id: options.reverse ? -1 : 1,
}); });
if (options.limit && options.limit !== -1) { if (options.limit && options.limit !== -1) {
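
Putting the excerpt above together, the cursor ends up roughly of this shape (illustrative values; the projection dropping 'value.location' exists only on the side of the diff that adds it):

const cursor = c.find(
    {
        _id: { $gt: 'foo/' }, // range bounds built from the listing options
        'value.dataStoreName': { $ne: 'us-east-2' }, // optional filter
    },
    { projection: { 'value.location': 0 } },
).sort({ _id: options.reverse ? -1 : 1 });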

View File

@ -185,48 +185,6 @@ function formatVersionKey(key, versionId, vFormat) {
return formatVersionKeyV0(key, versionId); return formatVersionKeyV0(key, versionId);
} }
function indexFormatMongoArrayToObject(mongoIndexArray) {
const indexObj = [];
for (const idx of mongoIndexArray) {
const keys = [];
let entries = [];
if (idx.key instanceof Map) {
entries = idx.key.entries();
} else {
entries = Object.entries(idx.key);
}
for (const k of entries) {
keys.push({ key: k[0], order: k[1] });
}
indexObj.push({ name: idx.name, keys });
}
return indexObj;
}
function indexFormatObjectToMongoArray(indexObj) {
const mongoIndexArray = [];
for (const idx of indexObj) {
const key = new Map();
for (const k of idx.keys) {
key.set(k.key, k.order);
}
// copy all field except keys from idx
// eslint-disable-next-line
const { keys: _, ...toCopy } = idx;
mongoIndexArray.push(Object.assign(toCopy, { name: idx.name, key }));
}
return mongoIndexArray;
}
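
A sketch of the round trip between the two formats handled above, for a single-key index:

// MongoDB driver format -> arsenal format:
indexFormatMongoArrayToObject([{ name: 'i1', key: new Map([['value.a', 1]]) }]);
// => [{ name: 'i1', keys: [{ key: 'value.a', order: 1 }] }]
// arsenal format -> MongoDB driver format:
indexFormatObjectToMongoArray([{ name: 'i1', keys: [{ key: 'value.a', order: 1 }] }]);
// => [{ name: 'i1', key: Map(1) { 'value.a' => 1 } }]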
module.exports = { module.exports = {
credPrefix, credPrefix,
@ -237,6 +195,4 @@ module.exports = {
translateConditions, translateConditions,
formatMasterKey, formatMasterKey,
formatVersionKey, formatVersionKey,
indexFormatMongoArrayToObject,
indexFormatObjectToMongoArray,
}; };

View File

@ -10,21 +10,21 @@ function trySetDirSyncFlag(path) {
const GETFLAGS = 2148034049; const GETFLAGS = 2148034049;
const SETFLAGS = 1074292226; const SETFLAGS = 1074292226;
const FS_DIRSYNC_FL = 65536n; const FS_DIRSYNC_FL = 65536;
const buffer = Buffer.alloc(8, 0); const buffer = Buffer.alloc(8, 0);
const pathFD = fs.openSync(path, 'r'); const pathFD = fs.openSync(path, 'r');
const status = ioctl(pathFD, GETFLAGS, buffer); const status = ioctl(pathFD, GETFLAGS, buffer);
assert.strictEqual(status, 0); assert.strictEqual(status, 0);
const currentFlags = buffer.readBigInt64LE(0); const currentFlags = buffer.readUIntLE(0, 8);
const flags = currentFlags | FS_DIRSYNC_FL; const flags = currentFlags | FS_DIRSYNC_FL;
buffer.writeBigInt64LE(flags, 0); buffer.writeUIntLE(flags, 0, 8);
const status2 = ioctl(pathFD, SETFLAGS, buffer); const status2 = ioctl(pathFD, SETFLAGS, buffer);
assert.strictEqual(status2, 0); assert.strictEqual(status2, 0);
fs.closeSync(pathFD); fs.closeSync(pathFD);
const pathFD2 = fs.openSync(path, 'r'); const pathFD2 = fs.openSync(path, 'r');
const confirmBuffer = Buffer.alloc(8, 0); const confirmBuffer = Buffer.alloc(8, 0);
ioctl(pathFD2, GETFLAGS, confirmBuffer); ioctl(pathFD2, GETFLAGS, confirmBuffer);
assert.strictEqual(confirmBuffer.readBigInt64LE(0), assert.strictEqual(confirmBuffer.readUIntLE(0, 8),
currentFlags | FS_DIRSYNC_FL, 'FS_DIRSYNC_FL not set'); currentFlags | FS_DIRSYNC_FL, 'FS_DIRSYNC_FL not set');
fs.closeSync(pathFD2); fs.closeSync(pathFD2);
} }
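
A note on the 64-bit handling above: Buffer.readUIntLE()/writeUIntLE() only accept a byteLength of up to 6 bytes, so an 8-byte flags word needs the BigInt accessors. A minimal standalone sketch:

const buf = Buffer.alloc(8, 0);
const FS_DIRSYNC_FL = 65536n;
const current = buf.readBigInt64LE(0); // 0n on a zeroed buffer
buf.writeBigInt64LE(current | FS_DIRSYNC_FL, 0); // set the flag bit
// buf.readUIntLE(0, 8) would throw ERR_OUT_OF_RANGE (byteLength <= 6)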

View File

@ -3,7 +3,7 @@ import { VersioningConstants } from './constants';
const VID_SEP = VersioningConstants.VersionId.Separator; const VID_SEP = VersioningConstants.VersionId.Separator;
/** /**
* Class for manipulating an object version. * Class for manipulating an object version.
* The format of a version: { isNull, isNull2, isDeleteMarker, versionId, otherInfo } * The format of a version: { isNull, isDeleteMarker, versionId, otherInfo }
* *
* @note Some of these functions are optimized based on string search * @note Some of these functions are optimized based on string search
* prior to a full JSON parse/stringify. (Vinh: 18K op/s are achieved * prior to a full JSON parse/stringify. (Vinh: 18K op/s are achieved
@ -13,31 +13,24 @@ const VID_SEP = VersioningConstants.VersionId.Separator;
export class Version { export class Version {
version: { version: {
isNull?: boolean; isNull?: boolean;
isNull2?: boolean;
isDeleteMarker?: boolean; isDeleteMarker?: boolean;
versionId?: string; versionId?: string;
isPHD?: boolean; isPHD?: boolean;
nullVersionId?: string;
}; };
/** /**
* Create a new version instantiation from its data object. * Create a new version instantiation from its data object.
* @param version - the data object to instantiate * @param version - the data object to instantiate
* @param version.isNull - is a null version * @param version.isNull - is a null version
* @param version.isNull2 - Whether new version is null or not AND has
* been put with a Cloudserver handling null keys (i.e. supporting
* S3C-7352)
* @param version.isDeleteMarker - is a delete marker * @param version.isDeleteMarker - is a delete marker
* @param version.versionId - the version id * @param version.versionId - the version id
* @constructor * @constructor
*/ */
constructor(version?: { constructor(version?: {
isNull?: boolean; isNull?: boolean;
isNull2?: boolean;
isDeleteMarker?: boolean; isDeleteMarker?: boolean;
versionId?: string; versionId?: string;
isPHD?: boolean; isPHD?: boolean;
nullVersionId?: string;
}) { }) {
this.version = version || {}; this.version = version || {};
} }
@ -90,33 +83,6 @@ export class Version {
return `{ "isPHD": true, "versionId": "${versionId}" }`; return `{ "isPHD": true, "versionId": "${versionId}" }`;
} }
/**
* Appends a key-value pair to a JSON object represented as a string. It adds
* a comma if the object is not empty (i.e., not just '{}'). It assumes the input
* string is formatted as a JSON object.
*
* @param {string} stringifiedObject The JSON object as a string to which the key-value pair will be appended.
* @param {string} key The key to append to the JSON object.
* @param {string} value The value associated with the key to append to the JSON object.
* @returns {string} The updated JSON object as a string with the new key-value pair appended.
* @example
* _jsonAppend('{"existingKey":"existingValue"}', 'newKey', 'newValue');
* // returns '{"existingKey":"existingValue","newKey":"newValue"}'
*/
static _jsonAppend(stringifiedObject: string, key: string, value: string): string {
// stringifiedObject value has the format of '{...}'
let index = stringifiedObject.length - 2;
while (stringifiedObject.charAt(index) === ' ') {
index -= 1;
}
const needComma = stringifiedObject.charAt(index) !== '{';
return (
`${stringifiedObject.slice(0, stringifiedObject.length - 1)}` +
(needComma ? ',' : '') +
`"${key}":"${value}"}`
);
}
/** /**
* Put versionId into an object in the (cheap) way of string manipulation, * Put versionId into an object in the (cheap) way of string manipulation,
* instead of the more expensive alternative parsing and stringification. * instead of the more expensive alternative parsing and stringification.
@ -127,32 +93,14 @@ export class Version {
*/ */
static appendVersionId(value: string, versionId: string): string { static appendVersionId(value: string, versionId: string): string {
// assuming value has the format of '{...}' // assuming value has the format of '{...}'
return Version._jsonAppend(value, 'versionId', versionId); let index = value.length - 2;
} while (value.charAt(index--) === ' ');
const comma = value.charAt(index + 1) !== '{';
/** return (
* Updates or appends a `nullVersionId` property to a JSON-formatted string. `${value.slice(0, value.length - 1)}` + // eslint-disable-line
* This function first checks if the `nullVersionId` property already exists within the input string. (comma ? ',' : '') +
* If it exists, the function updates the `nullVersionId` with the new value provided. `"versionId":"${versionId}"}`
* If it does not exist, the function appends a `nullVersionId` property with the provided value. );
*
* @static
* @param {string} value - The JSON-formatted string that may already contain a `nullVersionId` property.
* @param {string} nullVersionId - The new value for the `nullVersionId` property to be updated or appended.
* @returns {string} The updated JSON-formatted string with the new `nullVersionId` value.
*/
static updateOrAppendNullVersionId(value: string, nullVersionId: string): string {
// Check if "nullVersionId" already exists in the string
const nullVersionIdPattern = /"nullVersionId":"[^"]*"/;
const nullVersionIdExists = nullVersionIdPattern.test(value);
if (nullVersionIdExists) {
// Replace the existing nullVersionId with the new one
return value.replace(nullVersionIdPattern, `"nullVersionId":"${nullVersionId}"`);
} else {
// Append nullVersionId
return Version._jsonAppend(value, 'nullVersionId', nullVersionId);
}
} }
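
Both branches of updateOrAppendNullVersionId(), illustrated on small inputs with hypothetical version IDs:

// no nullVersionId yet: append one via _jsonAppend()
Version.updateOrAppendNullVersionId('{"key":"obj"}', 'vA');
// => '{"key":"obj","nullVersionId":"vA"}'
// nullVersionId already present: the regex replaces it in place
Version.updateOrAppendNullVersionId('{"key":"obj","nullVersionId":"vA"}', 'vB');
// => '{"key":"obj","nullVersionId":"vB"}'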
/** /**
@ -173,19 +121,6 @@ export class Version {
return this.version.isNull ?? false; return this.version.isNull ?? false;
} }
/**
* Check if a version is a null version and has
* been put with a Cloudserver handling null keys (i.e. supporting
* S3C-7352).
*
* @return - stating if the value is a null version and has
* been put with a Cloudserver handling null keys (i.e. supporting
* S3C-7352).
*/
isNull2Version(): boolean {
return this.version.isNull2 ?? false;
}
/** /**
* Check if a stringified object is a delete marker. * Check if a stringified object is a delete marker.
* *
@ -255,19 +190,6 @@ export class Version {
return this; return this;
} }
/**
* Mark that the null version has been put with a Cloudserver handling null keys (i.e. supporting S3C-7352)
*
* If `isNull2` is set, `isNull` is also set to maintain consistency.
* Explicitly setting both avoids misunderstandings and mistakes in future updates or fixes.
* @return - the updated version
*/
setNull2Version() {
this.version.isNull2 = true;
this.version.isNull = true;
return this;
}
/** /**
* Serialize the version. * Serialize the version.
* *

View File

@ -1,8 +1,6 @@
import { RequestLogger } from 'werelogs';
import errors, { ArsenalError } from '../errors'; import errors, { ArsenalError } from '../errors';
import { Version } from './Version'; import { Version } from './Version';
import { generateVersionId as genVID, getInfVid } from './VersionID'; import { generateVersionId as genVID } from './VersionID';
import WriteCache from './WriteCache'; import WriteCache from './WriteCache';
import WriteGatheringManager from './WriteGatheringManager'; import WriteGatheringManager from './WriteGatheringManager';
@ -483,113 +481,19 @@ export default class VersioningRequestProcessor {
const versionId = request.options.versionId; const versionId = request.options.versionId;
const versionKey = formatVersionKey(key, versionId); const versionKey = formatVersionKey(key, versionId);
const ops: any = []; const ops: any = [];
const masterVersion = data !== undefined && if (!request.options.isNull) {
Version.from(data); ops.push({ key: versionKey, value: request.value });
// push a version key if we're not updating the null
// version (or in legacy Cloudservers not sending the
// 'isNull' parameter, but this has an issue, see S3C-7526)
if (request.options.isNull !== true) {
const versionOp = { key: versionKey, value: request.value };
ops.push(versionOp);
} }
if (masterVersion) { if (data === undefined ||
// master key exists (Version.from(data).getVersionId() ?? '') >= versionId) {
// note that older versions have a greater version ID // master does not exist or is not newer than put
const versionIdFromMaster = masterVersion.getVersionId(); // version and needs to be updated as well.
if (versionIdFromMaster === undefined || // Note that older versions have a greater version ID.
versionIdFromMaster >= versionId) { ops.push({ key: request.key, value: request.value });
let value = request.value; } else if (request.options.isNull) {
logger.debug('version to put is not older than master'); logger.debug('create or update null key');
// Delete the deprecated, null key for backward compatibility const nullKey = formatVersionKey(key, '');
// to avoid storing both deprecated and new null keys. ops.push({ key: nullKey, value: request.value });
// If master null version was put with an older Cloudserver (or in compat mode),
// there is a possibility that it also has a null versioned key
// associated, so we need to delete it as we write the null key.
// Deprecated null key gets deleted when the new CloudServer:
// - updates metadata of a null master (options.isNull=true)
// - puts metadata on top of a master null key (options.isNull=false)
if (request.options.isNull !== undefined && // new null key behavior when isNull is defined.
masterVersion.isNullVersion() && // master is null
!masterVersion.isNull2Version()) { // master does not support the new null key behavior yet.
const masterNullVersionId = masterVersion.getVersionId();
// The deprecated null key is referenced in the "versionId" property of the master key.
if (masterNullVersionId) {
const oldNullVersionKey = formatVersionKey(key, masterNullVersionId);
ops.push({ key: oldNullVersionKey, type: 'del' });
}
}
// new behavior when isNull is defined is to only
// update the master key if it is the latest
// version, old behavior needs to copy master to
// the null version because older Cloudservers
// rely on version-specific PUT to copy master
// contents to a new null version key (newer ones
// use special versionId="null" requests for this
// purpose).
if (versionIdFromMaster !== versionId ||
request.options.isNull === undefined) {
// master key is strictly older than the put version
let masterVersionId;
if (masterVersion.isNullVersion() && versionIdFromMaster) {
logger.debug('master key is a null version');
masterVersionId = versionIdFromMaster;
} else if (versionIdFromMaster === undefined) {
logger.debug('master key is nonversioned');
// master key does not have a versionID
// => create one with the "infinite" version ID
masterVersionId = getInfVid(this.replicationGroupId);
masterVersion.setVersionId(masterVersionId);
} else {
logger.debug('master key is a regular version');
}
if (request.options.isNull === true) {
if (!masterVersionId) {
// master is a regular version: delete the null key that
// may exist (older null version)
logger.debug('delete null key');
const nullKey = formatVersionKey(key, '');
ops.push({ key: nullKey, type: 'del' });
}
} else if (masterVersionId) {
logger.debug('create version key from master version');
// isNull === false means Cloudserver supports null keys,
// so create a null key in this case, and a version key otherwise
const masterKeyVersionId = request.options.isNull === false ?
'' : masterVersionId;
const masterVersionKey = formatVersionKey(key, masterKeyVersionId);
masterVersion.setNullVersion();
// isNull === false means Cloudserver supports null keys,
// so create a null key with the isNull2 flag
if (request.options.isNull === false) {
masterVersion.setNull2Version();
// else isNull === undefined means Cloudserver does not support null keys,
// and versionIdFromMaster !== versionId means that a version is PUT on top of a null version
// hence set/update the new master nullVersionId for backward compatibility
} else if (versionIdFromMaster !== versionId) {
// => set the nullVersionId to the master version if put version on top of null version.
value = Version.updateOrAppendNullVersionId(request.value, masterVersionId);
}
ops.push({ key: masterVersionKey,
value: masterVersion.toString() });
}
} else {
logger.debug('version to put is the master');
}
ops.push({ key, value: value });
} else {
logger.debug('version to put is older than master');
if (request.options.isNull === true && !masterVersion.isNullVersion()) {
logger.debug('create or update null key');
const nullKey = formatVersionKey(key, '');
const nullKeyOp = { key: nullKey, value: request.value };
ops.push(nullKeyOp);
// for backward compatibility: remove null version key
ops.push({ key: versionKey, type: 'del' });
}
}
} else {
// master key does not exist: create it
ops.push({ key, value: request.value });
} }
return callback(null, ops, versionId); return callback(null, ops, versionId);
}); });

View File

@ -1,5 +1,3 @@
import { RequestLogger } from 'werelogs';
import errors, { ArsenalError } from '../errors'; import errors, { ArsenalError } from '../errors';
import WriteGatheringManager from './WriteGatheringManager'; import WriteGatheringManager from './WriteGatheringManager';

View File

@ -1,5 +1,3 @@
import { RequestLogger } from 'werelogs';
import { ArsenalError } from '../errors'; import { ArsenalError } from '../errors';
const WG_TIMEOUT = 5; // batching period in milliseconds const WG_TIMEOUT = 5; // batching period in milliseconds

View File

@ -3,7 +3,7 @@
"engines": { "engines": {
"node": ">=16" "node": ">=16"
}, },
"version": "8.1.134", "version": "8.1.93",
"description": "Common utilities for the S3 project components", "description": "Common utilities for the S3 project components",
"main": "build/index.js", "main": "build/index.js",
"repository": { "repository": {
@ -19,38 +19,39 @@
"dependencies": { "dependencies": {
"@azure/identity": "^3.1.1", "@azure/identity": "^3.1.1",
"@azure/storage-blob": "^12.12.0", "@azure/storage-blob": "^12.12.0",
"@js-sdsl/ordered-set": "^4.4.2", "@types/async": "^3.2.12",
"@swc/cli": "^0.4.0", "@types/utf8": "^3.0.1",
"@swc/core": "^1.7.4", "JSONStream": "^1.0.0",
"agentkeepalive": "^4.1.3", "agentkeepalive": "^4.1.3",
"ajv": "^6.12.3", "ajv": "6.12.3",
"async": "^2.6.4", "async": "~2.6.4",
"aws-sdk": "^2.1005.0", "aws-sdk": "^2.1005.0",
"backo": "^1.1.0", "backo": "^1.1.0",
"base-x": "^3.0.8", "base-x": "3.0.8",
"base62": "^2.0.1", "base62": "2.0.1",
"bson": "^4.0.0", "bson": "4.0.0",
"debug": "^4.1.0", "debug": "~4.1.0",
"diskusage": "^1.1.1", "diskusage": "^1.1.1",
"fcntl": "git+https://git.yourcmc.ru/vitalif/zenko-fcntl.git", "fcntl": "github:scality/node-fcntl#0.2.0",
"httpagent": "git+https://git.yourcmc.ru/vitalif/zenko-httpagent.git#development/1.0", "hdclient": "scality/hdclient#1.1.5",
"httpagent": "scality/httpagent#1.0.6",
"https-proxy-agent": "^2.2.0", "https-proxy-agent": "^2.2.0",
"ioredis": "^4.28.5", "ioredis": "^4.28.5",
"ipaddr.js": "^1.9.1", "ipaddr.js": "1.9.1",
"joi": "^17.6.0", "joi": "^17.6.0",
"JSONStream": "^1.0.0", "level": "~5.0.1",
"level": "^5.0.1", "level-sublevel": "~6.6.5",
"level-sublevel": "^6.6.5",
"mongodb": "^5.2.0", "mongodb": "^5.2.0",
"node-forge": "^1.3.0", "node-forge": "^1.3.0",
"prom-client": "^14.2.0", "prom-client": "14.2.0",
"simple-glob": "^0.2.0", "simple-glob": "^0.2.0",
"socket.io": "^4.6.1", "socket.io": "2.4.1",
"socket.io-client": "^4.6.1", "socket.io-client": "2.4.0",
"utf8": "^3.0.0", "sproxydclient": "github:scality/sproxydclient#8.0.8",
"utf8": "3.0.0",
"uuid": "^3.0.1", "uuid": "^3.0.1",
"werelogs": "git+https://git.yourcmc.ru/vitalif/zenko-werelogs.git#development/8.1", "werelogs": "scality/werelogs#8.1.2",
"xml2js": "^0.4.23" "xml2js": "~0.4.23"
}, },
"optionalDependencies": { "optionalDependencies": {
"ioctl": "^2.0.2" "ioctl": "^2.0.2"
@ -59,24 +60,22 @@
"@babel/preset-env": "^7.16.11", "@babel/preset-env": "^7.16.11",
"@babel/preset-typescript": "^7.16.7", "@babel/preset-typescript": "^7.16.7",
"@sinonjs/fake-timers": "^6.0.1", "@sinonjs/fake-timers": "^6.0.1",
"@types/async": "^3.2.12",
"@types/utf8": "^3.0.1",
"@types/ioredis": "^4.28.10", "@types/ioredis": "^4.28.10",
"@types/jest": "^27.4.1", "@types/jest": "^27.4.1",
"@types/node": "^18.19.41", "@types/node": "^17.0.21",
"@types/xml2js": "^0.4.11", "@types/xml2js": "^0.4.11",
"eslint": "^8.14.0", "eslint": "^8.12.0",
"eslint-config-airbnb-base": "^15.0.0", "eslint-config-airbnb": "6.2.0",
"eslint-config-scality": "git+https://git.yourcmc.ru/vitalif/zenko-eslint-config-scality.git", "eslint-config-scality": "scality/Guidelines#ec33dfb",
"eslint-plugin-react": "^4.3.0", "eslint-plugin-react": "^4.3.0",
"jest": "^27.5.1", "jest": "^27.5.1",
"mongodb-memory-server": "^8.12.2", "mongodb-memory-server": "^8.12.2",
"nyc": "^15.1.0", "nyc": "^15.1.0",
"sinon": "^9.0.2", "sinon": "^9.0.2",
"temp": "^0.9.1", "temp": "0.9.1",
"ts-jest": "^27.1.3", "ts-jest": "^27.1.3",
"ts-node": "^10.6.0", "ts-node": "^10.6.0",
"typescript": "^4.9.5" "typescript": "^4.6.2"
}, },
"scripts": { "scripts": {
"lint": "eslint $(git ls-files '*.js')", "lint": "eslint $(git ls-files '*.js')",
@ -84,11 +83,10 @@
"lint_yml": "yamllint $(git ls-files '*.yml')", "lint_yml": "yamllint $(git ls-files '*.yml')",
"test": "jest tests/unit", "test": "jest tests/unit",
"build": "tsc", "build": "tsc",
"prepack": "tsc", "prepare": "yarn build",
"postinstall": "[ -d build ] || swc -d build --copy-files package.json index.ts lib",
"ft_test": "jest tests/functional --testTimeout=120000 --forceExit", "ft_test": "jest tests/functional --testTimeout=120000 --forceExit",
"coverage": "nyc --clean jest tests --coverage --testTimeout=120000 --forceExit", "coverage": "nyc --clean jest tests --coverage --testTimeout=120000 --forceExit",
"build_doc": "cd documentation/listingAlgos/pics; dot -Tsvg delimiterStateChart.dot > delimiterStateChart.svg; dot -Tsvg delimiterMasterV0StateChart.dot > delimiterMasterV0StateChart.svg; dot -Tsvg delimiterVersionsStateChart.dot > delimiterVersionsStateChart.svg" "build_doc": "cd documentation/listingAlgos/pics; dot -Tsvg delimiterVersionsStateChart.dot > delimiterVersionsStateChart.svg"
}, },
"private": true, "private": true,
"jest": { "jest": {

View File

@ -1,356 +0,0 @@
const async = require('async');
const assert = require('assert');
const cluster = require('cluster');
const http = require('http');
const errors = require('../../../build/lib/errors').default;
const {
setupRPCPrimary,
setupRPCWorker,
sendWorkerCommand,
getPendingCommandsCount,
} = require('../../../build/lib/clustering/ClusterRPC');
/* eslint-disable prefer-const */
let SERVER_PORT;
let N_WORKERS;
/* eslint-enable prefer-const */
/* eslint-disable no-console */
function genUIDS() {
return Math.trunc(Math.random() * 0x10000).toString(16);
}
// for testing robustness: regularly pollute the message channel with
// unrelated IPC messages
function sendPollutionMessage(message) {
if (cluster.isPrimary) {
const randomWorker = Math.trunc(Math.random() * cluster.workers.length);
const worker = cluster.workers[randomWorker];
if (worker) {
worker.send(message);
}
} else {
process.send(message);
}
}
const ipcPolluterIntervals = [
setInterval(
() => sendPollutionMessage('string pollution'), 1500),
setInterval(
() => sendPollutionMessage({ pollution: 'bar' }), 2321),
setInterval(
() => sendPollutionMessage({ type: 'pollution', foo: { bar: 'baz' } }), 2777),
];
function someTestHandlerFunc(payload, uids, callback) {
setTimeout(() => callback(null, { someResponsePayload: 'bar' }), 10);
}
function testHandlerWithFailureFunc(payload, uids, callback) {
setTimeout(() => {
// exactly one of the workers fails to execute this command
if (cluster.worker.id === 1) {
callback(errors.ServiceFailure);
} else {
callback(null, { someResponsePayload: 'bar' });
}
}, 10);
}
const rpcHandlers = {
SomeTestHandler: someTestHandlerFunc,
TestHandlerWithFailure: testHandlerWithFailureFunc,
TestHandlerWithNoResponse: () => {},
};
const primaryHandlers = {
echoHandler: (worker, payload, uids, callback) => {
callback(null, { workerId: worker.id, payload, uids });
},
errorWithHttpCodeHandler: (_worker, _payload, _uids, callback) => {
callback({ name: 'ErrorMock', code: 418, message: 'An error message from primary' });
},
};
function respondOnTestFailure(message, error, results) {
console.error('After sendWorkerCommand() resolve/reject: ' +
`${message}, error=${error}, results=${JSON.stringify(results)}`);
console.trace();
throw errors.InternalError;
}
async function successfulCommandTestGeneric(nWorkers) {
try {
const results = await sendWorkerCommand('*', 'SomeTestHandler', genUIDS(), {});
if (results.length !== nWorkers) {
return respondOnTestFailure(
`expected ${nWorkers} worker results, got ${results.length}`,
null, results);
}
for (const result of results) {
if (typeof result !== 'object' || result === null) {
return respondOnTestFailure('not all results are objects', null, results);
}
if (result.error !== null) {
return respondOnTestFailure(
'one or more workers had an unexpected error',
null, results);
}
if (typeof result.result !== 'object' || result.result === null) {
return respondOnTestFailure(
'one or more workers did not return a result object',
null, results);
}
if (result.result.someResponsePayload !== 'bar') {
return respondOnTestFailure(
'one or more workers did not return the expected payload',
null, results);
}
}
return undefined;
} catch (err) {
return respondOnTestFailure(`returned unexpected error ${err}`, err, null);
}
}
async function successfulCommandTest() {
return successfulCommandTestGeneric(N_WORKERS);
}
async function successfulCommandWithExtraWorkerTest() {
return successfulCommandTestGeneric(N_WORKERS + 1);
}
async function unsupportedToWorkersTest() {
try {
const results = await sendWorkerCommand('badToWorkers', 'SomeTestHandler', genUIDS(), {});
return respondOnTestFailure('expected an error', null, results);
} catch (err) {
if (!err.is.NotImplemented) {
return respondOnTestFailure('expected a NotImplemented error', err, null);
}
return undefined;
}
}
async function unsupportedHandlerTest() {
try {
const results = await sendWorkerCommand('*', 'AWrongTestHandler', genUIDS(), {});
if (results.length !== N_WORKERS) {
return respondOnTestFailure(
`expected ${N_WORKERS} worker results, got ${results.length}`,
null, results);
}
for (const result of results) {
if (typeof result !== 'object' || result === null) {
return respondOnTestFailure('not all results are objects', null, results);
}
if (result.error === null || !result.error.is.NotImplemented) {
return respondOnTestFailure(
'one or more workers did not return the expected NotImplemented error',
null, results);
}
}
return undefined;
} catch (err) {
return respondOnTestFailure(`returned unexpected error ${err}`, err, null);
}
}
async function missingUidsTest() {
try {
const results = await sendWorkerCommand('*', 'SomeTestHandler', undefined, {});
return respondOnTestFailure('expected an error', null, results);
} catch (err) {
if (!err.is.MissingParameter) {
return respondOnTestFailure('expected a MissingParameter error', err, null);
}
return undefined;
}
}
async function duplicateUidsTest() {
const dupUIDS = genUIDS();
const promises = [
sendWorkerCommand('*', 'SomeTestHandler', dupUIDS, {}),
sendWorkerCommand('*', 'SomeTestHandler', dupUIDS, {}),
];
const results = await Promise.allSettled(promises);
if (results[1].status !== 'rejected') {
return respondOnTestFailure('expected an error from the second call', null, null);
}
if (!results[1].reason.is.OperationAborted) {
return respondOnTestFailure(
'expected an OperationAborted error', results[1].reason, null);
}
return undefined;
}
async function unsuccessfulWorkerTest() {
try {
const results = await sendWorkerCommand('*', 'TestHandlerWithFailure', genUIDS(), {});
if (results.length !== N_WORKERS) {
return respondOnTestFailure(
`expected ${N_WORKERS} worker results, got ${results.length}`,
null, results);
}
const nServiceFailures = results.filter(result => (
result.error && result.error.is.ServiceFailure
)).length;
if (nServiceFailures !== 1) {
return respondOnTestFailure(
'expected exactly one worker result to be ServiceFailure error',
null, results);
}
return undefined;
} catch (err) {
return respondOnTestFailure(`returned unexpected error ${err}`, err, null);
}
}
async function workerTimeoutTest() {
try {
const results = await sendWorkerCommand(
'*', 'TestHandlerWithNoResponse', genUIDS(), {}, 1000);
return respondOnTestFailure('expected an error', null, results);
} catch (err) {
if (!err.is.RequestTimeout) {
return respondOnTestFailure('expected a RequestTimeout error', err, null);
}
return undefined;
}
}
async function workerToPrimaryEcho() {
const uids = genUIDS();
const payload = { testing: true };
const expected = { workerId: cluster.worker.id, payload, uids };
const results = await sendWorkerCommand('PRIMARY', 'echoHandler', uids, payload);
assert.strictEqual(results.length, 1, 'There is 1 and only 1 primary');
assert.ifError(results[0].error);
assert.deepStrictEqual(results[0].result, expected);
}
async function workerToPrimaryErrorWithHttpCode() {
const uids = genUIDS();
const payload = { testing: true };
const results = await sendWorkerCommand('PRIMARY', 'errorWithHttpCodeHandler', uids, payload);
assert.strictEqual(results.length, 1, 'There is 1 and only 1 primary');
assert.ok(results[0].error);
assert.strictEqual(results[0].error.message, 'An error message from primary');
assert.strictEqual(results[0].error.code, 418);
}
const TEST_URLS = {
'/successful-command': successfulCommandTest,
'/successful-command-with-extra-worker': successfulCommandWithExtraWorkerTest,
'/unsupported-to-workers': unsupportedToWorkersTest,
'/unsupported-handler': unsupportedHandlerTest,
'/missing-uids': missingUidsTest,
'/duplicate-uids': duplicateUidsTest,
'/unsuccessful-worker': unsuccessfulWorkerTest,
'/worker-timeout': workerTimeoutTest,
'/worker-to-primary/echo': workerToPrimaryEcho,
'/worker-to-primary/error-with-http-code': workerToPrimaryErrorWithHttpCode,
};
if (process.argv.length !== 4) {
console.error('ClusterRPC test server: GET requests on test URLs trigger test runs\n\n' +
'Usage: node ClusterRPC-test-server.js <port> <nb-workers>\n\n' +
'Available test URLs:');
console.error(`${Object.keys(TEST_URLS).map(url => `- ${url}\n`).join('')}`);
process.exit(2);
}
/* eslint-disable prefer-const */
[
SERVER_PORT,
N_WORKERS,
] = process.argv.slice(2, 4).map(value => Number.parseInt(value, 10));
/* eslint-enable prefer-const */
let server;
if (cluster.isPrimary) {
async.timesSeries(
N_WORKERS,
(i, wcb) => cluster.fork().on('online', wcb),
() => {
setupRPCPrimary(primaryHandlers);
},
);
} else {
// in worker
server = http.createServer((req, res) => {
if (req.url in TEST_URLS) {
return TEST_URLS[req.url]().then(() => {
if (getPendingCommandsCount() !== 0) {
console.error(`There are still ${getPendingCommandsCount()} pending ` +
`RPC commands after test ${req.url} completed`);
throw errors.InternalError;
}
res.writeHead(200);
res.end();
}).catch(err => {
// serialize AssertionError to be displayed nicely in jest
if (err instanceof assert.AssertionError) {
const serializedErr = JSON.stringify({
code: err.code,
message: err.message,
stack: err.stack,
actual: err.actual,
expected: err.expected,
operator: err.operator,
});
res.writeHead(500);
res.end(serializedErr);
} else {
res.writeHead(err.code || 500);
res.end(err.message);
}
});
}
console.error(`Invalid test URL ${req.url}`);
res.writeHead(400);
res.end();
return undefined;
});
server.listen(SERVER_PORT);
server.on('listening', () => {
console.log('Worker is listening');
});
setupRPCWorker(rpcHandlers);
}
function stop(signal) {
if (cluster.isPrimary) {
console.log(`Handling signal ${signal}`);
for (const worker of Object.values(cluster.workers)) {
worker.kill(signal);
worker.on('exit', () => {
console.log(`Worker ${worker.id} exited`);
});
}
}
for (const interval of ipcPolluterIntervals) {
clearInterval(interval);
}
}
process.on('SIGTERM', stop);
process.on('SIGINT', stop);
process.on('SIGPIPE', () => {});
// for testing: spawn a new worker each time SIGUSR1 is received
function spawnNewWorker() {
if (cluster.isPrimary) {
cluster.fork();
}
}
process.on('SIGUSR1', spawnNewWorker);

View File

@ -1,151 +0,0 @@
'use strict'; // eslint-disable-line
const assert = require('assert');
const http = require('http');
const readline = require('readline');
const spawn = require('child_process').spawn;
const TEST_SERVER_PORT = 8800;
const NB_WORKERS = 4;
let testServer = null;
/*
* jest tests don't correctly support cluster mode with child forked
* processes, instead we use an external test server that launches
* each test based on the provided URL, and returns either 200 for
* success or 500 for failure. A crash would also cause a failure
* from the client side.
*/
function startTestServer(done) {
testServer = spawn('node', [
`${__dirname}/ClusterRPC-test-server.js`,
TEST_SERVER_PORT,
NB_WORKERS,
]);
// gather server stderr to display test failures info
testServer.stdout.pipe(process.stdout);
testServer.stderr.pipe(process.stderr);
const rl = readline.createInterface({
input: testServer.stdout,
});
let nbListeningWorkers = 0;
rl.on('line', line => {
if (line === 'Worker is listening') {
nbListeningWorkers++;
if (nbListeningWorkers === NB_WORKERS) {
rl.close();
done();
}
}
});
}
function stopTestServer(done) {
testServer.kill('SIGTERM');
testServer.on('close', done);
}
/**
* Try to deserialize and recreate AssertionError with stackTrace from spawned server
* @param {string} responseBody maybe serialized AssertionError
* @throws {assert.AssertionError}
* @returns {undefined}
*/
function handleAssertionError(responseBody) {
let parsed;
try {
parsed = JSON.parse(responseBody);
} catch (_) {
return;
}
if (parsed && parsed.code === 'ERR_ASSERTION') {
const err = new assert.AssertionError(parsed);
err.stack = parsed.stack;
throw err;
}
}
function runTest(testUrl, cb) {
const req = http.request(`http://localhost:${TEST_SERVER_PORT}/${testUrl}`, res => {
let responseBody = '';
res
.on('data', (chunk) => {
responseBody += chunk;
})
.on('end', () => {
try {
handleAssertionError(responseBody);
expect(res.statusCode).toEqual(200);
} catch (err) {
if (!(err instanceof assert.AssertionError)) {
err.message += `\n\nBody:\n${responseBody}`;
}
return cb(err);
}
return cb();
})
.on('error', err => cb(err));
});
req
.end()
.on('error', err => cb(err));
}
describe('ClusterRPC', () => {
beforeAll(done => startTestServer(done));
afterAll(done => stopTestServer(done));
it('should send a successful command to all workers', done => {
runTest('successful-command', done);
});
it('should error if "toWorkers" field is not "*"', done => {
runTest('unsupported-to-workers', done);
});
it('should error if handler name is not known', done => {
runTest('unsupported-handler', done);
});
it('should error if "uids" field is not passed', done => {
runTest('missing-uids', done);
});
it('should error if two simultaneous commands with same "uids" field are sent', done => {
runTest('duplicate-uids', done);
});
it('should timeout if one or more workers don\'t respond in allocated time', done => {
runTest('worker-timeout', done);
});
it('should return worker errors in results array', done => {
runTest('unsuccessful-worker', done);
});
it('should send a successful command to all workers after an extra worker is spawned', done => {
const rl = readline.createInterface({
input: testServer.stdout,
});
rl.on('line', line => {
if (line === 'Worker is listening') {
rl.close();
runTest('successful-command-with-extra-worker', done);
}
});
// The test server spawns a new worker when it receives SIGUSR1
testServer.kill('SIGUSR1');
});
describe('worker to primary', () => {
it('should succeed and return a result', done => {
runTest('worker-to-primary/echo', done);
});
it('should return an error with a code', done => {
runTest('worker-to-primary/error-with-http-code', done);
});
});
});

View File

@ -454,48 +454,6 @@ describe('MongoClientInterface::metadata.deleteObjectMD', () => {
}, },
], done); ], done);
}); });
it('should delete the object directly if params.doesNotNeedOpogUpdate is true', done => {
const objName = 'object-to-delete';
const objVal = {
key: 'object-to-delete',
versionId: 'null',
};
const versionParams = {
versioning: false,
versionId: null,
repairMaster: null,
};
async.series([
next => {
metadata.putObjectMD(BUCKET_NAME, objName, objVal, versionParams, logger, next);
},
next => {
metadata.deleteObjectMD(BUCKET_NAME, objName, { doesNotNeedOpogUpdate: true }, logger, next);
},
next => {
metadata.getObjectMD(BUCKET_NAME, objName, null, logger, err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return next();
});
},
next => {
getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 0);
return next();
});
},
], done);
});
it('should throw an error if params.doesNotNeedOpogUpdate is true and object does not exist', done => {
const objName = 'non-existent-object';
metadata.deleteObjectMD(BUCKET_NAME, objName, { doesNotNeedOpogUpdate: true }, logger, err => {
assert.deepStrictEqual(err, errors.InternalError);
return done();
});
});
}); });
}); });
}); });

View File

@ -1,331 +0,0 @@
const async = require('async');
const assert = require('assert');
const werelogs = require('werelogs');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
const { versioning } = require('../../../../index');
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
const BucketInfo = require('../../../../lib/models/BucketInfo').default;
const MetadataWrapper =
require('../../../../lib/storage/metadata/MetadataWrapper');
const genVID = versioning.VersionID.generateVersionId;
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
const { formatMasterKey, formatVersionKey } = require('../../../../lib/storage/metadata/mongoclient/utils');
const IMPL_NAME = 'mongodb';
const DB_NAME = 'metadata';
const BUCKET_NAME = 'test-bucket-batching';
const replicationGroupId = 'RG001';
const N = 10;
const mongoserver = new MongoMemoryReplSet({
debug: false,
instanceOpts: [
{ port: 27019 },
],
replSet: {
name: 'rs0',
count: 1,
DB_NAME,
storageEngine: 'ephemeralForTest',
},
});
let uidCounter = 0;
function generateVersionId() {
return genVID(`${process.pid}.${uidCounter++}`,
replicationGroupId);
}
const variations = [
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0, versioning: false },
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0, versioning: true },
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1, versioning: false },
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1, versioning: true },
];
describe('MongoClientInterface::metadata.getObjectsMD', () => {
let metadata;
let collection;
let versionId2;
const params = {
key: 'pfx1-test-object',
objVal: {
key: 'pfx1-test-object',
versionId: 'null',
},
};
function updateMasterObject(objName, versionId, objVal, vFormat, cb) {
const mKey = formatMasterKey(objName, vFormat);
collection.updateOne(
{
_id: mKey,
$or: [{
'value.versionId': {
$exists: false,
},
},
{
'value.versionId': {
$gt: versionId,
},
},
],
},
{
$set: { _id: mKey, value: objVal },
},
{ upsert: true }).then(() => cb(null)).catch(err => cb(err));
}
/**
* Sets the "deleted" property to true
* @param {string} key object name
* @param {Function} cb callback
* @return {undefined}
*/
function flagObjectForDeletion(key, cb) {
collection.updateMany(
{ 'value.key': key },
{ $set: { 'value.deleted': true } },
{ upsert: false }).then(() => cb()).catch(err => cb(err));
}
beforeAll(done => {
mongoserver.start().then(() => {
mongoserver.waitUntilRunning().then(() => {
const opts = {
mongodb: {
replicaSetHosts: 'localhost:27019',
writeConcern: 'majority',
replicaSet: 'rs0',
readPreference: 'primary',
database: DB_NAME,
},
};
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
metadata.setup(done);
});
});
});
afterAll(done => {
async.series([
next => metadata.close(next),
next => mongoserver.stop()
.then(() => next())
.catch(next),
], done);
});
variations.forEach(variation => {
const itOnlyInV1 = variation.vFormat === 'v1' && variation.versioning ? it : it.skip;
describe(`vFormat : ${variation.vFormat}, versioning: ${variation.versioning}`, () => {
let paramsArr = [];
beforeEach(done => {
// reset params
paramsArr = Array.from({ length: N }, (_, i) => ({
key: `pfx1-test-object${i + 1}`,
objVal: {
key: `pfx1-test-object${i + 1}`,
versionId: 'null',
},
}));
const bucketMD = BucketInfo.fromObj({
_name: BUCKET_NAME,
_owner: 'testowner',
_ownerDisplayName: 'testdisplayname',
_creationDate: new Date().toJSON(),
_acl: {
Canned: 'private',
FULL_CONTROL: [],
WRITE: [],
WRITE_ACP: [],
READ: [],
READ_ACP: [],
},
_mdBucketModelVersion: 10,
_transient: false,
_deleted: false,
_serverSideEncryption: null,
_versioningConfiguration: null,
_locationConstraint: 'us-east-1',
_readLocationConstraint: null,
_cors: null,
_replicationConfiguration: null,
_lifecycleConfiguration: null,
_uid: '',
_isNFS: null,
ingestion: null,
});
const versionParams = {
versioning: variation.versioning,
versionId: null,
repairMaster: null,
};
async.series([
next => {
metadata.client.defaultBucketKeyFormat = variation.vFormat;
return next();
},
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
if (err) {
return next(err);
}
collection = metadata.client.getCollection(BUCKET_NAME);
return next();
}),
next => {
async.eachSeries(paramsArr, (params, eachCb) => {
metadata.putObjectMD(BUCKET_NAME, params.key, params.objVal,
versionParams, logger, (err, res) => {
if (err) {
return eachCb(err);
}
if (variation.versioning) {
// eslint-disable-next-line no-param-reassign
params.versionId = JSON.parse(res).versionId;
}
return eachCb(null);
});
}, next);
},
next => {
metadata.putObjectMD(BUCKET_NAME, paramsArr[N - 1].key, paramsArr[N - 1].objVal,
versionParams, logger, (err, res) => {
if (err) {
return next(err);
}
if (variation.versioning) {
versionId2 = JSON.parse(res).versionId;
} else {
versionId2 = 'null';
}
return next(null);
});
},
], done);
});
afterEach(done => {
metadata.deleteBucket(BUCKET_NAME, logger, done);
});
it(`should get ${N} object${variation.versioning ? '' : ' master'} versions using batching`, done => {
const request = paramsArr.map(({ key, objVal }) => ({
key,
params: {
versionId: variation.versioning ? objVal.versionId : null,
},
}));
metadata.getObjectsMD(BUCKET_NAME, request, logger, (err, objects) => {
assert.strictEqual(err, null);
assert.strictEqual(objects.length, N);
objects.forEach((obj, i) => {
assert.strictEqual(obj.doc.key, paramsArr[i].key);
if (variation.versioning) {
assert.strictEqual(obj.doc.versionId, paramsArr[i].objVal.versionId);
}
});
return done();
});
});
it('should not throw an error if object or version is nonexistent and return null doc', done => {
const request = [{
key: 'nonexistent',
params: {
versionId: variation.versioning ? 'nonexistent' : null,
},
}];
metadata.getObjectsMD(BUCKET_NAME, request, logger, (err, objects) => {
assert.strictEqual(err, null);
assert.strictEqual(objects.length, 1);
assert.strictEqual(objects[0].doc, null);
done();
});
});
it(`should return latest version when master is PHD ${variation.it}`, done => {
if (!variation.versioning) {
return done();
}
const request = paramsArr.map(({ key, objVal }) => ({
key,
params: {
versionId: variation.versioning ? objVal.versionId : null,
},
}));
return async.series([
next => {
const objectName = formatMasterKey(paramsArr[N - 1].key, variation.vFormat);
// adding isPHD flag to master
const phdVersionId = generateVersionId();
paramsArr[N - 1].objVal.versionId = phdVersionId;
paramsArr[N - 1].objVal.isPHD = true;
updateMasterObject(objectName, phdVersionId, paramsArr[N - 1].objVal,
variation.vFormat, next);
},
// Should return latest object version
next => metadata.getObjectsMD(BUCKET_NAME, request, logger, (err, objects) => {
assert.deepStrictEqual(err, null);
objects.forEach((obj, i) => {
assert.strictEqual(obj.doc.key, paramsArr[i].objVal.key);
if (variation.versioning && i === N - 1) {
assert.strictEqual(obj.doc.versionId, versionId2);
} else {
assert.strictEqual(obj.doc.versionId, paramsArr[i].objVal.versionId);
}
});
delete paramsArr[N - 1].objVal.isPHD;
return next();
}),
], done);
});
it('should fail to get an object tagged for deletion', done => {
const key = paramsArr[0].key;
flagObjectForDeletion(key, err => {
assert.ifError(err);
metadata.getObjectsMD(BUCKET_NAME, [{ key }], logger, (err, object) => {
assert.strictEqual(err, null);
assert.strictEqual(object[0].doc, null);
done();
});
});
});
itOnlyInV1(`Should return last version when master is deleted ${variation.vFormat}`, done => {
const versioningParams = {
versioning: true,
versionId: null,
repairMaster: null,
};
async.series([
// putting a delete marker as last version
next => {
paramsArr[0].versionId = null;
paramsArr[0].objVal.isDeleteMarker = true;
return metadata.putObjectMD(BUCKET_NAME, paramsArr[0].key, paramsArr[0].objVal,
versioningParams, logger, next);
},
next => metadata.getObjectsMD(BUCKET_NAME, [{ key: paramsArr[0].key }], logger, (err, objects) => {
assert.strictEqual(err, null);
assert.strictEqual(objects[0].doc.key, paramsArr[0].key);
assert.strictEqual(objects[0].doc.isDeleteMarker, true);
paramsArr[0].objVal.isDeleteMarker = null;
return next();
}),
], done);
});
});
});
});

View File

@ -0,0 +1,104 @@
const async = require('async');
const assert = require('assert');
const werelogs = require('werelogs');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
const MetadataWrapper =
require('../../../../../lib/storage/metadata/MetadataWrapper');
const { versioning } = require('../../../../../index');
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
const { makeBucketMD } = require('./utils');
const IMPL_NAME = 'mongodb';
const DB_NAME = 'metadata';
const BUCKET_NAME = 'test-lifecycle-list-bucket-v0';
const mongoserver = new MongoMemoryReplSet({
debug: false,
instanceOpts: [
{ port: 27020 },
],
replSet: {
name: 'rs0',
count: 1,
DB_NAME,
storageEngine: 'ephemeralForTest',
},
});
describe('MongoClientInterface::metadata.listLifecycleObject::global', () => {
let metadata;
beforeAll(done => {
mongoserver.start().then(() => {
mongoserver.waitUntilRunning().then(() => {
const opts = {
mongodb: {
replicaSetHosts: 'localhost:27020',
writeConcern: 'majority',
replicaSet: 'rs0',
readPreference: 'primary',
database: DB_NAME,
},
};
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
metadata.client.defaultBucketKeyFormat = BucketVersioningKeyFormat.v0;
metadata.setup(done);
});
});
});
afterAll(done => {
async.series([
next => metadata.close(next),
next => mongoserver.stop()
.then(() => next())
.catch(next),
], done);
});
beforeEach(done => {
const bucketMD = makeBucketMD(BUCKET_NAME);
return metadata.createBucket(BUCKET_NAME, bucketMD, logger, done);
});
afterEach(done => {
metadata.deleteBucket(BUCKET_NAME, logger, done);
});
it('Should return error listing current versions if v0 key format', done => {
const params = {
listingType: 'DelimiterCurrent',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert(err.NotImplemented);
assert(!data);
return done();
});
});
it('Should return error listing non-current versions if v0 key format', done => {
const params = {
listingType: 'DelimiterNonCurrent',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert(err.NotImplemented);
assert(!data);
return done();
});
});
it('Should return error listing orphan delete markers if v0 key format', done => {
const params = {
listingType: 'DelimiterOrphanDeleteMarker',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert(err.NotImplemented);
assert(!data);
return done();
});
});
});

View File

@@ -43,6 +43,7 @@ describe('MongoClientInterface::metadata.listLifecycleObject::nullVersion', () => {
                 },
             };
             metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
+            metadata.client.defaultBucketKeyFormat = BucketVersioningKeyFormat.v1;
             metadata.setup(done);
         });
     });
@@ -57,159 +58,96 @@ describe('MongoClientInterface::metadata.listLifecycleObject::nullVersion', () => {
         ], done);
     });

-    [BucketVersioningKeyFormat.v0, BucketVersioningKeyFormat.v1].forEach(v => {
-        describe(`bucket format version: ${v}`, () => {
-            beforeEach(done => {
-                const bucketMD = makeBucketMD(BUCKET_NAME);
-                const versionParams = {
-                    versioning: true,
-                    versionId: null,
-                    repairMaster: null,
-                };
-                metadata.client.defaultBucketKeyFormat = v;
-                async.series([
-                    next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, next),
-                    next => {
-                        const objName = 'key0';
-                        const timestamp = 0;
-                        const lastModified = new Date(timestamp).toISOString();
-                        const objVal = {
-                            'key': objName,
-                            'versionId': 'null',
-                            'isNull': true,
-                            'last-modified': lastModified,
-                        };
-                        return metadata.putObjectMD(BUCKET_NAME, objName, objVal, versionParams, logger, next);
-                    },
-                    next => {
-                        const objName = 'key1';
-                        const timestamp = 0;
-                        const lastModified = new Date(timestamp).toISOString();
-                        const objVal = {
-                            'key': objName,
-                            'versionId': 'null',
-                            'isNull': true,
-                            'last-modified': lastModified,
-                        };
-                        return metadata.putObjectMD(BUCKET_NAME, objName, objVal, versionParams, logger, next);
-                    },
-                    next => {
-                        const objName = 'key1';
-                        const timestamp = 0;
-                        const lastModified = new Date(timestamp).toISOString();
-                        const objVal = {
-                            'key': objName,
-                            'last-modified': lastModified,
-                        };
-                        return metadata.putObjectMD(BUCKET_NAME, objName, objVal, versionParams, logger, next);
-                    },
-                    // key2 simulates a scenario where:
-                    // 1) bucket is versioned
-                    // 2) put object key2
-                    // 3) bucket versioning gets suspended
-                    // 4) put object key2
-                    // result:
-                    // {
-                    //     "_id" : "Mkey0",
-                    //     "value" : {
-                    //         "key" : "key2",
-                    //         "isNull" : true,
-                    //         "versionId" : "<VersionId2>",
-                    //         "last-modified" : "2023-07-11T14:16:00.151Z",
-                    //     }
-                    // },
-                    // {
-                    //     "_id" : "Vkey0\u0000<VersionId1>",
-                    //     "value" : {
-                    //         "key" : "key2",
-                    //         "versionId" : "<VersionId1>",
-                    //         "tags" : {
-                    //         },
-                    //         "last-modified" : "2023-07-11T14:15:36.713Z",
-                    //     }
-                    // },
-                    next => {
-                        const objName = 'key2';
-                        const timestamp = 0;
-                        const lastModified = new Date(timestamp).toISOString();
-                        const objVal = {
-                            'key': objName,
-                            'last-modified': lastModified,
-                        };
-                        return metadata.putObjectMD(BUCKET_NAME, objName, objVal, versionParams, logger, next);
-                    },
-                    next => {
-                        const objName = 'key2';
-                        const timestamp = 0;
-                        const params = {
-                            versionId: '',
-                        };
-                        const lastModified = new Date(timestamp).toISOString();
-                        const objVal = {
-                            'key': objName,
-                            'last-modified': lastModified,
-                            'isNull': true,
-                        };
-                        return metadata.putObjectMD(BUCKET_NAME, objName, objVal, params, logger, next);
-                    },
-                ], done);
-            });
-            afterEach(done => metadata.deleteBucket(BUCKET_NAME, logger, done));
-            it('Should list the null current version and set IsNull to true', done => {
-                const params = {
-                    listingType: 'DelimiterCurrent',
-                };
-                return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
-                    assert.ifError(err);
-                    assert.strictEqual(data.IsTruncated, false);
-                    assert.strictEqual(data.Contents.length, 3);
-                    // check that key0 has a null current version
-                    const firstKey = data.Contents[0];
-                    assert.strictEqual(firstKey.key, 'key0');
-                    assert.strictEqual(firstKey.value.IsNull, true);
-                    // check that key1 has no null current version
-                    const secondKey = data.Contents[1];
-                    assert.strictEqual(secondKey.key, 'key1');
-                    assert(!secondKey.value.IsNull);
-                    // check that key2 has a null current version
-                    const thirdKey = data.Contents[2];
-                    assert.strictEqual(thirdKey.key, 'key2');
-                    assert.strictEqual(thirdKey.value.IsNull, true);
-                    return done();
-                });
-            });
-            it('Should list the null non-current version and set IsNull to true', done => {
-                const params = {
-                    listingType: 'DelimiterNonCurrent',
-                };
-                return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
-                    assert.deepStrictEqual(err, null);
-                    assert.strictEqual(data.IsTruncated, false);
-                    assert.strictEqual(data.Contents.length, 2);
-                    // check that key1 has a null non-current version
-                    const firstKey = data.Contents[0];
-                    assert.strictEqual(firstKey.key, 'key1');
-                    assert.strictEqual(firstKey.value.IsNull, true);
-                    // check that key2 has no null non-current version
-                    const secondKey = data.Contents[1];
-                    assert.strictEqual(secondKey.key, 'key2');
-                    assert(!secondKey.value.IsNull);
-                    return done();
-                });
-            });
-        });
-    });
+    beforeEach(done => {
+        const bucketMD = makeBucketMD(BUCKET_NAME);
+        const versionParams = {
+            versioning: true,
+            versionId: null,
+            repairMaster: null,
+        };
+        async.series([
+            next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, next),
+            next => {
+                const objName = 'key0';
+                const timestamp = 0;
+                const lastModified = new Date(timestamp).toISOString();
+                const objVal = {
+                    'key': objName,
+                    'versionId': 'null',
+                    'isNull': true,
+                    'last-modified': lastModified,
+                };
+                return metadata.putObjectMD(BUCKET_NAME, objName, objVal, versionParams, logger, next);
+            },
+            next => {
+                const objName = 'key1';
+                const timestamp = 0;
+                const lastModified = new Date(timestamp).toISOString();
+                const objVal = {
+                    'key': objName,
+                    'versionId': 'null',
+                    'isNull': true,
+                    'last-modified': lastModified,
+                };
+                return metadata.putObjectMD(BUCKET_NAME, objName, objVal, versionParams, logger, next);
+            },
+            next => {
+                const objName = 'key1';
+                const timestamp = 0;
+                const lastModified = new Date(timestamp).toISOString();
+                const objVal = {
+                    'key': objName,
+                    'versionId': 'null',
+                    'last-modified': lastModified,
+                };
+                return metadata.putObjectMD(BUCKET_NAME, objName, objVal, versionParams, logger, next);
+            },
+        ], done);
+    });
+
+    afterEach(done => {
+        metadata.deleteBucket(BUCKET_NAME, logger, done);
+    });
+
+    it('Should list the null current version and set IsNull to true', done => {
+        const params = {
+            listingType: 'DelimiterCurrent',
+        };
+        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
+            assert.ifError(err);
+            assert.strictEqual(data.IsTruncated, false);
+            assert.strictEqual(data.Contents.length, 2);
+            // check that key0 has a null current version
+            const firstKey = data.Contents[0];
+            assert.strictEqual(firstKey.key, 'key0');
+            assert.strictEqual(firstKey.value.IsNull, true);
+            // check that key1 has not a null current version
+            const secondKey = data.Contents[1];
+            assert.strictEqual(secondKey.key, 'key1');
+            assert(!secondKey.value.IsNull);
+            return done();
+        });
+    });
+
+    it('Should list the null non-current version and set IsNull to true', done => {
+        const params = {
+            listingType: 'DelimiterNonCurrent',
+        };
+        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
+            assert.deepStrictEqual(err, null);
+            assert.strictEqual(data.IsTruncated, false);
+            assert.strictEqual(data.Contents.length, 1);
+            // check that key1 has a null non-current version
+            const firstKey = data.Contents[0];
+            assert.strictEqual(firstKey.key, 'key1');
+            assert.strictEqual(firstKey.value.IsNull, true);
+            return done();
+        });
+    });
 });
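The key2 comment in the removed block describes how a null current version comes about. Condensed into a sketch under the same assumptions as the tests (metadata, logger and BUCKET_NAME already set up; timestamps illustrative):

const async = require('async');

function makeNullCurrentVersion(metadata, logger, cb) {
    const versionParams = { versioning: true, versionId: null, repairMaster: null };
    async.series([
        // 1) put while versioning is enabled: creates a regular version
        next => metadata.putObjectMD(BUCKET_NAME, 'key2',
            { 'key': 'key2', 'last-modified': new Date(0).toISOString() },
            versionParams, logger, next),
        // 2) put after versioning gets suspended: versionId '' makes the
        // backend write the null version over the master key (isNull: true)
        next => metadata.putObjectMD(BUCKET_NAME, 'key2',
            { 'key': 'key2', 'isNull': true, 'last-modified': new Date(1).toISOString() },
            { versionId: '' }, logger, next),
    ], cb);
}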

View File

@@ -42,6 +42,7 @@ describe('MongoClientInterface::metadata.listLifecycleObject::orphan', () => {
                 },
             };
             metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
+            metadata.client.defaultBucketKeyFormat = BucketVersioningKeyFormat.v1;
             metadata.setup(done);
         });
     });
@@ -56,400 +57,392 @@ describe('MongoClientInterface::metadata.listLifecycleObject::orphan', () => {
         ], done);
     });

-    [BucketVersioningKeyFormat.v0, BucketVersioningKeyFormat.v1].forEach(v => {
-        describe(`bucket format version: ${v}`, () => {
-            beforeEach(done => {
-                const bucketMD = makeBucketMD(BUCKET_NAME);
-                const versionParams = {
-                    versioning: true,
-                    versionId: null,
-                    repairMaster: null,
-                };
-                metadata.client.defaultBucketKeyFormat = v;
-                async.series([
-                    next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, next),
-                    next => {
-                        const keyName = 'pfx0-test-object';
-                        const objVal = {
-                            'key': keyName,
-                            'isDeleteMarker': true,
-                            'last-modified': new Date(0).toISOString(), // 1970-01-01T00:00:00.000Z
-                        };
-                        const params = {
-                            versioning: true,
-                        };
-                        return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
-                    },
-                    next => {
-                        const params = {
-                            objName: 'pfx1-test-object',
-                            objVal: {
-                                key: 'pfx1-test-object',
-                                versionId: 'null',
-                            },
-                            nbVersions: 1,
-                        };
-                        const timestamp = 0;
-                        putBulkObjectVersions(metadata, BUCKET_NAME, params.objName, params.objVal, versionParams,
-                            params.nbVersions, timestamp, logger, next);
-                    },
-                    next => {
-                        const params = {
-                            objName: 'pfx2-test-object',
-                            objVal: {
-                                key: 'pfx2-test-object',
-                                versionId: 'null',
-                            },
-                            nbVersions: 1,
-                        };
-                        const timestamp = 0;
-                        putBulkObjectVersions(metadata, BUCKET_NAME, params.objName, params.objVal, versionParams,
-                            params.nbVersions, timestamp, logger, next);
-                    },
-                    next => {
-                        const keyName = 'pfx2-test-object';
-                        const objVal = {
-                            'key': keyName,
-                            'isDeleteMarker': true,
-                            'last-modified': new Date(2).toISOString(), // 1970-01-01T00:00:00.002Z
-                        };
-                        const params = {
-                            versioning: true,
-                        };
-                        return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
-                    },
-                    next => {
-                        const keyName = 'pfx3-test-object';
-                        const objVal = {
-                            'key': keyName,
-                            'isDeleteMarker': true,
-                            'last-modified': new Date(0).toISOString(), // 1970-01-01T00:00:00.000Z
-                        };
-                        const params = {
-                            versioning: true,
-                        };
-                        return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
-                    },
-                    next => {
-                        const keyName = 'pfx4-test-object';
-                        const objVal = {
-                            'key': keyName,
-                            'isDeleteMarker': true,
-                            'last-modified': new Date(5).toISOString(), // 1970-01-01T00:00:00.005Z
-                        };
-                        const params = {
-                            versioning: true,
-                        };
-                        return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
-                    },
-                    next => {
-                        const keyName = 'pfx4-test-object2';
-                        const objVal = {
-                            'key': keyName,
-                            'isDeleteMarker': true,
-                            'last-modified': new Date(6).toISOString(), // 1970-01-01T00:00:00.006Z
-                        };
-                        const params = {
-                            versioning: true,
-                        };
-                        return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
-                    },
-                ], done);
-            });
-            /* eslint-disable max-len */
-            // { "_id" : "Mpfx1-test-object", "value" : { "key" : "pfx1-test-object", "versionId" : "v1", "last-modified" : "1970-01-01T00:00:00.001Z" } }
-            // { "_id" : "Vpfx0-test-object{sep}v0", "value" : { "key" : "pfx0-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.000Z", "versionId" : "v0" } }
-            // { "_id" : "Vpfx1-test-object{sep}v1", "value" : { "key" : "pfx1-test-object", "versionId" : "v1", "last-modified" : "1970-01-01T00:00:00.001Z" } }
-            // { "_id" : "Vpfx2-test-object{sep}v3", "value" : { "key" : "pfx2-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.002Z", "versionId" : "v3" } }
-            // { "_id" : "Vpfx2-test-object{sep}v2", "value" : { "key" : "pfx2-test-object", "versionId" : "v2", "last-modified" : "1970-01-01T00:00:00.001Z" } }
-            // { "_id" : "Vpfx3-test-object{sep}v4", "value" : { "key" : "pfx3-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.000Z", "versionId" : "v4" } }
-            // { "_id" : "Vpfx4-test-object{sep}v5", "value" : { "key" : "pfx4-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.005Z", "versionId" : "v5" } }
-            // { "_id" : "Vpfx4-test-object2{sep}v6", "value" : { "key" : "pfx4-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.006Z", "versionId" : "v6" } }
-            /* eslint-enable max-len */
-            afterEach(done => {
-                metadata.deleteBucket(BUCKET_NAME, logger, done);
-            });
-            it('Should list orphan delete markers', done => {
-                const params = {
-                    listingType: 'DelimiterOrphanDeleteMarker',
-                };
-                return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
-                    assert.deepStrictEqual(err, null);
-                    assert.strictEqual(data.IsTruncated, false);
-                    assert(!data.NextMarker);
-                    assert.strictEqual(data.Contents.length, 4);
-                    assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
-                    assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
-                    assert.strictEqual(data.Contents[2].key, 'pfx4-test-object');
-                    assert.strictEqual(data.Contents[3].key, 'pfx4-test-object2');
-                    return done();
-                });
-            });
-            it('Should return empty list when beforeDate is before youngest last-modified', done => {
-                const params = {
-                    listingType: 'DelimiterOrphanDeleteMarker',
-                    beforeDate: '1970-01-01T00:00:00.000Z',
-                };
-                return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
-                    assert.deepStrictEqual(err, null);
-                    assert.strictEqual(data.IsTruncated, false);
-                    assert(!data.NextMarker);
-                    assert.strictEqual(data.Contents.length, 0);
-                    return done();
-                });
-            });
-            it('Should list orphan delete markers older than 1970-01-01T00:00:00.003Z', done => {
-                const params = {
-                    listingType: 'DelimiterOrphanDeleteMarker',
-                    beforeDate: '1970-01-01T00:00:00.003Z',
-                };
-                return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
-                    assert.deepStrictEqual(err, null);
-                    assert.strictEqual(data.IsTruncated, false);
-                    assert(!data.NextMarker);
-                    assert.strictEqual(data.Contents.length, 2);
-                    assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
-                    assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
-                    return done();
-                });
-            });
-            it('Should return the first part of the orphan delete markers listing', done => {
-                const params = {
-                    listingType: 'DelimiterOrphanDeleteMarker',
-                    maxKeys: 1,
-                };
-                return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
-                    assert.deepStrictEqual(err, null);
-                    assert.strictEqual(data.IsTruncated, true);
-                    assert.strictEqual(data.NextMarker, 'pfx0-test-object');
-                    assert.strictEqual(data.Contents.length, 1);
-                    assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
-                    return done();
-                });
-            });
-            it('Should return the second part of the orphan delete markers listing', done => {
-                const params = {
-                    listingType: 'DelimiterOrphanDeleteMarker',
-                    marker: 'pfx0-test-object',
-                    maxKeys: 1,
-                };
-                return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
-                    assert.deepStrictEqual(err, null);
-                    assert.strictEqual(data.IsTruncated, true);
-                    assert.strictEqual(data.NextMarker, 'pfx3-test-object');
-                    assert.strictEqual(data.Contents.length, 1);
-                    assert.strictEqual(data.Contents[0].key, 'pfx3-test-object');
-                    return done();
-                });
-            });
-            it('Should return the third part of the orphan delete markers listing', done => {
-                const params = {
-                    listingType: 'DelimiterOrphanDeleteMarker',
-                    marker: 'pfx3-test-object',
-                    maxKeys: 1,
-                };
-                return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
-                    assert.deepStrictEqual(err, null);
-                    assert.strictEqual(data.IsTruncated, true);
-                    assert.strictEqual(data.NextMarker, 'pfx4-test-object');
-                    assert.strictEqual(data.Contents.length, 1);
-                    assert.strictEqual(data.Contents[0].key, 'pfx4-test-object');
-                    return done();
-                });
-            });
-            it('Should return the fourth part of the orphan delete markers listing', done => {
-                const params = {
-                    listingType: 'DelimiterOrphanDeleteMarker',
-                    marker: 'pfx4-test-object',
-                    maxKeys: 1,
-                };
-                return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
-                    assert.deepStrictEqual(err, null);
-                    assert.strictEqual(data.IsTruncated, false);
-                    assert(!data.NextMarker);
-                    assert.strictEqual(data.Contents.length, 1);
-                    assert.strictEqual(data.Contents[0].key, 'pfx4-test-object2');
-                    return done();
-                });
-            });
-            it('Should list the two first orphan delete markers', done => {
-                const params = {
-                    listingType: 'DelimiterOrphanDeleteMarker',
-                    maxKeys: 2,
-                };
-                return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
-                    assert.deepStrictEqual(err, null);
-                    assert.strictEqual(data.IsTruncated, true);
-                    assert.strictEqual(data.Contents.length, 2);
-                    assert.strictEqual(data.NextMarker, 'pfx3-test-object');
-                    assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
-                    assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
-                    return done();
-                });
-            });
-            it('Should list the four first orphan delete markers', done => {
-                const params = {
-                    listingType: 'DelimiterOrphanDeleteMarker',
-                    maxKeys: 4,
-                };
-                return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
-                    assert.deepStrictEqual(err, null);
-                    assert.strictEqual(data.IsTruncated, false);
-                    assert(!data.NextMarker);
-                    assert.strictEqual(data.Contents.length, 4);
-                    assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
-                    assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
-                    assert.strictEqual(data.Contents[2].key, 'pfx4-test-object');
-                    assert.strictEqual(data.Contents[3].key, 'pfx4-test-object2');
-                    return done();
-                });
-            });
-            it('Should return an empty list if no orphan delete marker starts with prefix pfx2', done => {
-                const params = {
-                    listingType: 'DelimiterOrphanDeleteMarker',
-                    prefix: 'pfx2',
-                };
-                return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
-                    assert.deepStrictEqual(err, null);
-                    assert.strictEqual(data.IsTruncated, false);
-                    assert(!data.NextMarker);
-                    assert.strictEqual(data.Contents.length, 0);
-                    return done();
-                });
-            });
-            it('Should list orphan delete markers that start with prefix pfx4', done => {
-                const params = {
-                    listingType: 'DelimiterOrphanDeleteMarker',
-                    prefix: 'pfx4',
-                };
-                return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
-                    assert.deepStrictEqual(err, null);
-                    assert.strictEqual(data.IsTruncated, false);
-                    assert(!data.NextMarker);
-                    assert.strictEqual(data.Contents.length, 2);
-                    assert.strictEqual(data.Contents[0].key, 'pfx4-test-object');
-                    assert.strictEqual(data.Contents[1].key, 'pfx4-test-object2');
-                    return done();
-                });
-            });
-            it('Should return the first orphan delete marker version that starts with prefix', done => {
-                const params = {
-                    listingType: 'DelimiterOrphanDeleteMarker',
-                    prefix: 'pfx4',
-                    maxKeys: 1,
-                };
-                return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
-                    assert.deepStrictEqual(err, null);
-                    assert.strictEqual(data.IsTruncated, true);
-                    assert.strictEqual(data.Contents.length, 1);
-                    assert.strictEqual(data.NextMarker, 'pfx4-test-object');
-                    assert.strictEqual(data.Contents[0].key, 'pfx4-test-object');
-                    return done();
-                });
-            });
-            it('Should return the following orphan delete marker version that starts with prefix', done => {
-                const params = {
-                    listingType: 'DelimiterOrphanDeleteMarker',
-                    marker: 'pfx4-test-object',
-                    prefix: 'pfx4',
-                    maxKeys: 1,
-                };
-                return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
-                    assert.deepStrictEqual(err, null);
-                    assert.strictEqual(data.IsTruncated, false);
-                    assert(!data.NextMarker);
-                    assert.strictEqual(data.Contents.length, 1);
-                    assert.strictEqual(data.Contents[0].key, 'pfx4-test-object2');
-                    return done();
-                });
-            });
-            it('Should return the truncated list of orphan delete markers older than 1970-01-01T00:00:00.006Z',
-            done => {
-                const params = {
-                    listingType: 'DelimiterOrphanDeleteMarker',
-                    maxKeys: 2,
-                    beforeDate: '1970-01-01T00:00:00.006Z',
-                };
-                return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
-                    assert.deepStrictEqual(err, null);
-                    assert.strictEqual(data.IsTruncated, true);
-                    assert.strictEqual(data.Contents.length, 2);
-                    assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
-                    assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
-                    assert.strictEqual(data.NextMarker, 'pfx3-test-object');
-                    return done();
-                });
-            });
-            it('Should return the following list of orphan delete markers older than 1970-01-01T00:00:00.006Z',
-            done => {
-                const params = {
-                    listingType: 'DelimiterOrphanDeleteMarker',
-                    maxKeys: 2,
-                    beforeDate: '1970-01-01T00:00:00.006Z',
-                    marker: 'pfx3-test-object',
-                };
-                return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
-                    assert.deepStrictEqual(err, null);
-                    assert.strictEqual(data.IsTruncated, false);
-                    assert.strictEqual(data.Contents.length, 1);
-                    assert.strictEqual(data.Contents[0].key, 'pfx4-test-object');
-                    return done();
-                });
-            });
-            it('Should return the truncated list of orphan delete markers older than 1970-01-01T00:00:00.001Z',
-            done => {
-                const params = {
-                    listingType: 'DelimiterOrphanDeleteMarker',
-                    maxKeys: 2,
-                    beforeDate: '1970-01-01T00:00:00.001Z',
-                };
-                return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
-                    assert.deepStrictEqual(err, null);
-                    assert.strictEqual(data.IsTruncated, true);
-                    assert.strictEqual(data.Contents.length, 2);
-                    assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
-                    assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
-                    assert.strictEqual(data.NextMarker, 'pfx3-test-object');
-                    return done();
-                });
-            });
-        });
-    });
+    beforeEach(done => {
+        const bucketMD = makeBucketMD(BUCKET_NAME);
+        const versionParams = {
+            versioning: true,
+            versionId: null,
+            repairMaster: null,
+        };
+        async.series([
+            next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, next),
+            next => {
+                const keyName = 'pfx0-test-object';
+                const objVal = {
+                    'key': keyName,
+                    'isDeleteMarker': true,
+                    'last-modified': new Date(0).toISOString(), // 1970-01-01T00:00:00.000Z
+                };
+                const params = {
+                    versioning: true,
+                };
+                return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
+            },
+            next => {
+                const params = {
+                    objName: 'pfx1-test-object',
+                    objVal: {
+                        key: 'pfx1-test-object',
+                        versionId: 'null',
+                    },
+                    nbVersions: 1,
+                };
+                const timestamp = 0;
+                putBulkObjectVersions(metadata, BUCKET_NAME, params.objName, params.objVal, versionParams,
+                    params.nbVersions, timestamp, logger, next);
+            },
+            next => {
+                const params = {
+                    objName: 'pfx2-test-object',
+                    objVal: {
+                        key: 'pfx2-test-object',
+                        versionId: 'null',
+                    },
+                    nbVersions: 1,
+                };
+                const timestamp = 0;
+                putBulkObjectVersions(metadata, BUCKET_NAME, params.objName, params.objVal, versionParams,
+                    params.nbVersions, timestamp, logger, next);
+            },
+            next => {
+                const keyName = 'pfx2-test-object';
+                const objVal = {
+                    'key': keyName,
+                    'isDeleteMarker': true,
+                    'last-modified': new Date(2).toISOString(), // 1970-01-01T00:00:00.002Z
+                };
+                const params = {
+                    versioning: true,
+                };
+                return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
+            },
+            next => {
+                const keyName = 'pfx3-test-object';
+                const objVal = {
+                    'key': keyName,
+                    'isDeleteMarker': true,
+                    'last-modified': new Date(0).toISOString(), // 1970-01-01T00:00:00.000Z
+                };
+                const params = {
+                    versioning: true,
+                };
+                return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
+            },
+            next => {
+                const keyName = 'pfx4-test-object';
+                const objVal = {
+                    'key': keyName,
+                    'isDeleteMarker': true,
+                    'last-modified': new Date(5).toISOString(), // 1970-01-01T00:00:00.005Z
+                };
+                const params = {
+                    versioning: true,
+                };
+                return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
+            },
+            next => {
+                const keyName = 'pfx4-test-object2';
+                const objVal = {
+                    'key': keyName,
+                    'isDeleteMarker': true,
+                    'last-modified': new Date(6).toISOString(), // 1970-01-01T00:00:00.006Z
+                };
+                const params = {
+                    versioning: true,
+                };
+                return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
+            },
+        ], done);
+    });
+    /* eslint-disable max-len */
+    // { "_id" : "Mpfx1-test-object", "value" : { "key" : "pfx1-test-object", "versionId" : "v1", "last-modified" : "1970-01-01T00:00:00.001Z" } }
+    // { "_id" : "Vpfx0-test-object{sep}v0", "value" : { "key" : "pfx0-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.000Z", "versionId" : "v0" } }
+    // { "_id" : "Vpfx1-test-object{sep}v1", "value" : { "key" : "pfx1-test-object", "versionId" : "v1", "last-modified" : "1970-01-01T00:00:00.001Z" } }
+    // { "_id" : "Vpfx2-test-object{sep}v3", "value" : { "key" : "pfx2-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.002Z", "versionId" : "v3" } }
+    // { "_id" : "Vpfx2-test-object{sep}v2", "value" : { "key" : "pfx2-test-object", "versionId" : "v2", "last-modified" : "1970-01-01T00:00:00.001Z" } }
+    // { "_id" : "Vpfx3-test-object{sep}v4", "value" : { "key" : "pfx3-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.000Z", "versionId" : "v4" } }
+    // { "_id" : "Vpfx4-test-object{sep}v5", "value" : { "key" : "pfx4-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.005Z", "versionId" : "v5" } }
+    // { "_id" : "Vpfx4-test-object2{sep}v6", "value" : { "key" : "pfx4-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.006Z", "versionId" : "v6" } }
+    /* eslint-enable max-len */
+
+    afterEach(done => {
+        metadata.deleteBucket(BUCKET_NAME, logger, done);
+    });
+
+    it('Should list orphan delete markers', done => {
+        const params = {
+            listingType: 'DelimiterOrphanDeleteMarker',
+        };
+        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
+            assert.deepStrictEqual(err, null);
+            assert.strictEqual(data.IsTruncated, false);
+            assert(!data.NextMarker);
+            assert.strictEqual(data.Contents.length, 4);
+            assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
+            assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
+            assert.strictEqual(data.Contents[2].key, 'pfx4-test-object');
+            assert.strictEqual(data.Contents[3].key, 'pfx4-test-object2');
+            return done();
+        });
+    });
+
+    it('Should return empty list when beforeDate is before youngest last-modified', done => {
+        const params = {
+            listingType: 'DelimiterOrphanDeleteMarker',
+            beforeDate: '1970-01-01T00:00:00.000Z',
+        };
+        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
+            assert.deepStrictEqual(err, null);
+            assert.strictEqual(data.IsTruncated, false);
+            assert(!data.NextMarker);
+            assert.strictEqual(data.Contents.length, 0);
+            return done();
+        });
+    });
+
+    it('Should list orphan delete markers older than 1970-01-01T00:00:00.003Z', done => {
+        const params = {
+            listingType: 'DelimiterOrphanDeleteMarker',
+            beforeDate: '1970-01-01T00:00:00.003Z',
+        };
+        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
+            assert.deepStrictEqual(err, null);
+            assert.strictEqual(data.IsTruncated, false);
+            assert(!data.NextMarker);
+            assert.strictEqual(data.Contents.length, 2);
+            assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
+            assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
+            return done();
+        });
+    });
+
+    it('Should return the first part of the orphan delete markers listing', done => {
+        const params = {
+            listingType: 'DelimiterOrphanDeleteMarker',
+            maxKeys: 1,
+        };
+        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
+            assert.deepStrictEqual(err, null);
+            assert.strictEqual(data.IsTruncated, true);
+            assert.strictEqual(data.NextMarker, 'pfx0-test-object');
+            assert.strictEqual(data.Contents.length, 1);
+            assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
+            return done();
+        });
+    });
+
+    it('Should return the second part of the orphan delete markers listing', done => {
+        const params = {
+            listingType: 'DelimiterOrphanDeleteMarker',
+            marker: 'pfx0-test-object',
+            maxKeys: 1,
+        };
+        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
+            assert.deepStrictEqual(err, null);
+            assert.strictEqual(data.IsTruncated, true);
+            assert.strictEqual(data.NextMarker, 'pfx3-test-object');
+            assert.strictEqual(data.Contents.length, 1);
+            assert.strictEqual(data.Contents[0].key, 'pfx3-test-object');
+            return done();
+        });
+    });
+
+    it('Should return the third part of the orphan delete markers listing', done => {
+        const params = {
+            listingType: 'DelimiterOrphanDeleteMarker',
+            marker: 'pfx3-test-object',
+            maxKeys: 1,
+        };
+        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
+            assert.deepStrictEqual(err, null);
+            assert.strictEqual(data.IsTruncated, true);
+            assert.strictEqual(data.NextMarker, 'pfx4-test-object');
+            assert.strictEqual(data.Contents.length, 1);
+            assert.strictEqual(data.Contents[0].key, 'pfx4-test-object');
+            return done();
+        });
+    });
+
+    it('Should return the fourth part of the orphan delete markers listing', done => {
+        const params = {
+            listingType: 'DelimiterOrphanDeleteMarker',
+            marker: 'pfx4-test-object',
+            maxKeys: 1,
+        };
+        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
+            assert.deepStrictEqual(err, null);
+            assert.strictEqual(data.IsTruncated, false);
+            assert(!data.NextMarker);
+            assert.strictEqual(data.Contents.length, 1);
+            assert.strictEqual(data.Contents[0].key, 'pfx4-test-object2');
+            return done();
+        });
+    });
+
+    it('Should list the two first orphan delete markers', done => {
+        const params = {
+            listingType: 'DelimiterOrphanDeleteMarker',
+            maxKeys: 2,
+        };
+        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
+            assert.deepStrictEqual(err, null);
+            assert.strictEqual(data.IsTruncated, true);
+            assert.strictEqual(data.Contents.length, 2);
+            assert.strictEqual(data.NextMarker, 'pfx3-test-object');
+            assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
+            assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
+            return done();
+        });
+    });
+
+    it('Should list the four first orphan delete markers', done => {
+        const params = {
+            listingType: 'DelimiterOrphanDeleteMarker',
+            maxKeys: 4,
+        };
+        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
+            assert.deepStrictEqual(err, null);
+            assert.strictEqual(data.IsTruncated, false);
+            assert(!data.NextMarker);
+            assert.strictEqual(data.Contents.length, 4);
+            assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
+            assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
+            assert.strictEqual(data.Contents[2].key, 'pfx4-test-object');
+            assert.strictEqual(data.Contents[3].key, 'pfx4-test-object2');
+            return done();
+        });
+    });
+
+    it('Should return an empty list if no orphan delete marker starts with prefix pfx2', done => {
+        const params = {
+            listingType: 'DelimiterOrphanDeleteMarker',
+            prefix: 'pfx2',
+        };
+        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
+            assert.deepStrictEqual(err, null);
+            assert.strictEqual(data.IsTruncated, false);
+            assert(!data.NextMarker);
+            assert.strictEqual(data.Contents.length, 0);
+            return done();
+        });
+    });
+
+    it('Should list orphan delete markers that start with prefix pfx4', done => {
+        const params = {
+            listingType: 'DelimiterOrphanDeleteMarker',
+            prefix: 'pfx4',
+        };
+        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
+            assert.deepStrictEqual(err, null);
+            assert.strictEqual(data.IsTruncated, false);
+            assert(!data.NextMarker);
+            assert.strictEqual(data.Contents.length, 2);
+            assert.strictEqual(data.Contents[0].key, 'pfx4-test-object');
+            assert.strictEqual(data.Contents[1].key, 'pfx4-test-object2');
+            return done();
+        });
+    });
+
+    it('Should return the first orphan delete marker version that starts with prefix', done => {
+        const params = {
+            listingType: 'DelimiterOrphanDeleteMarker',
+            prefix: 'pfx4',
+            maxKeys: 1,
+        };
+        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
+            assert.deepStrictEqual(err, null);
+            assert.strictEqual(data.IsTruncated, true);
+            assert.strictEqual(data.Contents.length, 1);
+            assert.strictEqual(data.NextMarker, 'pfx4-test-object');
+            assert.strictEqual(data.Contents[0].key, 'pfx4-test-object');
+            return done();
+        });
+    });
+
+    it('Should return the following orphan delete marker version that starts with prefix', done => {
+        const params = {
+            listingType: 'DelimiterOrphanDeleteMarker',
+            marker: 'pfx4-test-object',
+            prefix: 'pfx4',
+            maxKeys: 1,
+        };
+        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
+            assert.deepStrictEqual(err, null);
+            assert.strictEqual(data.IsTruncated, false);
+            assert(!data.NextMarker);
+            assert.strictEqual(data.Contents.length, 1);
+            assert.strictEqual(data.Contents[0].key, 'pfx4-test-object2');
+            return done();
+        });
+    });
+
+    it('Should return the truncated list of orphan delete markers older than 1970-01-01T00:00:00.006Z', done => {
+        const params = {
+            listingType: 'DelimiterOrphanDeleteMarker',
+            maxKeys: 2,
+            beforeDate: '1970-01-01T00:00:00.006Z',
+        };
+        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
+            assert.deepStrictEqual(err, null);
+            assert.strictEqual(data.IsTruncated, true);
+            assert.strictEqual(data.Contents.length, 2);
+            assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
+            assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
+            assert.strictEqual(data.NextMarker, 'pfx3-test-object');
+            return done();
+        });
+    });
+
+    it('Should return the following list of orphan delete markers older than 1970-01-01T00:00:00.006Z', done => {
+        const params = {
+            listingType: 'DelimiterOrphanDeleteMarker',
+            maxKeys: 2,
+            beforeDate: '1970-01-01T00:00:00.006Z',
+            marker: 'pfx3-test-object',
+        };
+        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
+            assert.deepStrictEqual(err, null);
+            assert.strictEqual(data.IsTruncated, false);
+            assert.strictEqual(data.Contents.length, 1);
+            assert.strictEqual(data.Contents[0].key, 'pfx4-test-object');
+            return done();
+        });
+    });
+
+    it('Should return the truncated list of orphan delete markers older than 1970-01-01T00:00:00.001Z', done => {
+        const params = {
+            listingType: 'DelimiterOrphanDeleteMarker',
+            maxKeys: 2,
+            beforeDate: '1970-01-01T00:00:00.001Z',
+        };
+        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
+            assert.deepStrictEqual(err, null);
+            assert.strictEqual(data.IsTruncated, true);
+            assert.strictEqual(data.Contents.length, 2);
+            assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
+            assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
+            assert.strictEqual(data.NextMarker, 'pfx3-test-object');
+            return done();
+        });
+    });
 });
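The marker tests above spell out the pagination contract: a truncated page carries NextMarker, and feeding it back as marker resumes the listing where the page ended. A sketch of draining the listing with that contract (metadata and logger set up as in the tests; eachPage is a hypothetical per-page consumer):

function listAllOrphanDeleteMarkers(metadata, bucketName, logger, eachPage, cb) {
    const params = {
        listingType: 'DelimiterOrphanDeleteMarker',
        maxKeys: 1000,
    };
    const listPage = () => metadata.listLifecycleObject(bucketName, params, logger, (err, data) => {
        if (err) {
            return cb(err);
        }
        eachPage(data.Contents);
        if (data.IsTruncated) {
            // resume after the last key returned in this page
            params.marker = data.NextMarker;
            return listPage();
        }
        return cb(null);
    });
    listPage();
}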

View File

@@ -30,10 +30,10 @@ function putBulkObjectVersions(metadata, bucketName, objName, objVal, params, ve
         });
     }, (err, expectedVersionIds) => {
         // The last version is removed since it represents the current version.
-        const lastVersionId = expectedVersionIds.pop();
+        expectedVersionIds.pop();
         // array is reversed to be alligned with the version order (latest to oldest).
         expectedVersionIds.reverse();
-        return cb(err, { lastVersionId, expectedVersionIds });
+        return cb(err, expectedVersionIds);
     });
 }
@@ -68,12 +68,10 @@ function makeBucketMD(bucketName) {
 }

 function assertContents(contents, expected) {
-    assert.strictEqual(contents.length, expected.length);
     contents.forEach((c, i) => {
         assert.strictEqual(c.key, expected[i].key);
         assert.strictEqual(c.value.LastModified, expected[i].LastModified);
         assert.strictEqual(c.value.staleDate, expected[i].staleDate);
-        assert.strictEqual(c.value.dataStoreName, expected[i].dataStoreName);
         if (expected[i].VersionId) {
             assert.strictEqual(c.value.VersionId, expected[i].VersionId);
         }
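The first hunk changes the helper's callback payload. A sketch of the two call shapes a consumer has to handle, with arguments as used throughout these tests:

// '+' side: bare array of non-current version ids (latest to oldest)
putBulkObjectVersions(metadata, BUCKET_NAME, objName, objVal, versionParams,
    nbVersions, timestamp, logger, (err, expectedVersionIds) => {
        // expectedVersionIds[0] is the most recent non-current version
    });

// '-' side: an object that also exposes the current version's id
putBulkObjectVersions(metadata, BUCKET_NAME, objName, objVal, versionParams,
    nbVersions, timestamp, logger, (err, { lastVersionId, expectedVersionIds }) => {
        // lastVersionId identifies the current (latest) version
    });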

View File

@@ -8,8 +8,6 @@ const MetadataWrapper =
     require('../../../../lib/storage/metadata/MetadataWrapper');
 const { versioning } = require('../../../../index');
 const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
-const sinon = require('sinon');
-const MongoReadStream = require('../../../../lib/storage/metadata/mongoclient/readStream');

 const IMPL_NAME = 'mongodb';
 const DB_NAME = 'metadata';
@@ -72,13 +70,6 @@ describe('MongoClientInterface::metadata.listObject', () => {
             { upsert: false }).then(() => cb()).catch(err => cb(err));
     }

-    function customListingParser(entries) {
-        return entries.map(entry => {
-            const tmp = JSON.parse(entry.value);
-            return tmp;
-        });
-    }
-
     beforeAll(done => {
         mongoserver.start().then(() => {
             mongoserver.waitUntilRunning().then(() => {
@@ -159,12 +150,6 @@ describe('MongoClientInterface::metadata.listObject', () => {
                 objVal: {
                     key: 'pfx1-test-object',
                     versionId: 'null',
-                    location: [{
-                        start: 0,
-                        size: 150,
-                        dataStoreETag: 'etag',
-                        dataStoreVersionId: 'versionId',
-                    }],
                 },
                 nbVersions: 5,
             };
@@ -492,81 +477,6 @@ describe('MongoClientInterface::metadata.listObject', () => {
                 }),
             ], done);
         });
-
-        it('Should properly destroy the MongoDBReadStream', done => {
-            // eslint-disable-next-line func-names
-            const destroyStub = sinon.stub(MongoReadStream.prototype, 'destroy').callsFake(function (...args) {
-                // You can add extra logic here if needed
-                MongoReadStream.prototype.destroy.wrappedMethod.apply(this, ...args);
-            });
-
-            const params = {
-                listingType: 'DelimiterMaster',
-                maxKeys: 100,
-            };
-            return metadata.listObject(BUCKET_NAME, params, logger, err => {
-                assert.ifError(err);
-                assert(destroyStub.called, 'Destroy should have been called on MongoReadStream');
-                // Restore original destroy method
-                destroyStub.restore();
-                return done();
-            });
-        });
-
-        it('Should properly destroy the MongoDBReadStream on error', done => {
-            // eslint-disable-next-line func-names
-            const destroyStub = sinon.stub(MongoReadStream.prototype, 'destroy').callsFake(function (...args) {
-                // You can add extra logic here if needed
-                MongoReadStream.prototype.destroy.wrappedMethod.apply(this, ...args);
-            });
-            // stub the cursor creation to emit an error
-            // eslint-disable-next-line func-names
-            const readStub = sinon.stub(MongoReadStream.prototype, '_read').callsFake(function () {
-                this.emit('error', new Error('error'));
-            });
-
-            const params = {
-                listingType: 'DelimiterMaster',
-                maxKeys: 100,
-            };
-            return metadata.listObject(BUCKET_NAME, params, logger, err => {
-                assert(err, 'Expected an error');
-                assert(destroyStub.called, 'Destroy should have been called on MongoReadStream');
-                destroyStub.restore();
-                readStub.restore();
-                return done();
-            });
-        });
-
-        it('Should not include location in listing result and use custom listing parser', done => {
-            const opts = {
-                mongodb: {
-                    replicaSetHosts: 'localhost:27020',
-                    writeConcern: 'majority',
-                    replicaSet: 'rs0',
-                    readPreference: 'primary',
-                    database: DB_NAME,
-                },
-                customListingParser,
-            };
-            const parserSpy = sinon.spy(opts, 'customListingParser');
-
-            const md = new MetadataWrapper(IMPL_NAME, opts, null, logger);
-            md.setup(() => {
-                const params = {
-                    listingType: 'DelimiterMaster',
-                    maxKeys: 100,
-                };
-                return md.listObject(BUCKET_NAME, params, logger, (err, data) => {
-                    assert.ifError(err);
-                    assert.strictEqual(data.Contents.length, 3);
-                    assert.strictEqual(data.Contents[0].key, 'pfx1-test-object');
-                    assert.strictEqual(data.Contents[0].location, undefined);
-                    assert(parserSpy.called);
-                    return done();
-                });
-            });
-        });
     });
 });
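The last removed test documents the customListingParser hook: raw listing entries carry JSON-encoded metadata in entry.value, and the parser decides what each listing result looks like. Distilled from that test, a sketch of wiring such a parser into the '-' side of this compare (connection values as used throughout these tests):

const opts = {
    mongodb: {
        replicaSetHosts: 'localhost:27020',
        writeConcern: 'majority',
        replicaSet: 'rs0',
        readPreference: 'primary',
        database: DB_NAME,
    },
    // each raw entry value is a JSON string of the object metadata
    customListingParser: entries => entries.map(entry => JSON.parse(entry.value)),
};
const md = new MetadataWrapper(IMPL_NAME, opts, null, logger);
md.setup(() => { /* md.listObject() results now go through the parser */ });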

View File

@ -1,265 +0,0 @@
import GapCache from '../../../../lib/algos/cache/GapCache';
describe('GapCache', () => {
let gapCache;
beforeEach(() => {
// exposureDelayMs=100, maxGaps=10, maxGapWeight=100
gapCache = new GapCache(100, 10, 100);
gapCache.start();
});
afterEach(() => {
gapCache.stop();
});
describe('getters and setters', () => {
it('maxGapWeight getter', () => {
expect(gapCache.maxGapWeight).toEqual(100);
});
it('maxGapWeight setter', () => {
gapCache.maxGapWeight = 123;
expect(gapCache.maxGapWeight).toEqual(123);
// check that internal gap sets have also been updated
expect(gapCache._stagingUpdates.newGaps.maxWeight).toEqual(123);
expect(gapCache._frozenUpdates.newGaps.maxWeight).toEqual(123);
});
it('exposureDelayMs getter', () => {
expect(gapCache.exposureDelayMs).toEqual(100);
});
it('exposureDelayMs setter', async () => {
// insert a first gap
gapCache.setGap('bar', 'baz', 10);
// change the exposure delay to 50ms
gapCache.exposureDelayMs = 50;
expect(gapCache.exposureDelayMs).toEqual(50);
gapCache.setGap('qux', 'quz', 10);
// wait for more than twice the new exposure delay
await new Promise(resolve => setTimeout(resolve, 200));
// only the second gap should have been exposed, due to the change of
// exposure delay subsequent to the first call to setGap()
expect(await gapCache.lookupGap('ape', 'zoo')).toEqual(
{ firstKey: 'qux', lastKey: 'quz', weight: 10 }
);
});
});
describe('clear()', () => {
it('should clear all exposed gaps', async () => {
gapCache.setGap('bar', 'baz', 10);
gapCache.setGap('qux', 'quz', 20);
await new Promise(resolve => setTimeout(resolve, 300));
expect(await gapCache.lookupGap('ape', 'zoo')).toEqual(
{ firstKey: 'bar', lastKey: 'baz', weight: 10 }
);
gapCache.clear();
expect(await gapCache.lookupGap('ape', 'zoo')).toBeNull();
});
it('should clear all staging gaps', async () => {
gapCache.setGap('bar', 'baz', 10);
gapCache.setGap('qux', 'quz', 20);
gapCache.clear();
await new Promise(resolve => setTimeout(resolve, 300));
expect(await gapCache.lookupGap('ape', 'zoo')).toBeNull();
});
it('should keep existing invalidating updates against the next new gaps', async () => {
// invalidate future gaps containing 'dog'
expect(gapCache.removeOverlappingGaps(['dog'])).toEqual(0);
// then, clear the cache
gapCache.clear();
// wait for 50ms (half of exposure delay of 100ms) before
// setting a new gap overlapping with 'dog'
await new Promise(resolve => setTimeout(resolve, 50));
gapCache.setGap('cat', 'fox', 10);
// also set a non-overlapping gap to make sure it is not invalidated
gapCache.setGap('goat', 'hog', 20);
// wait an extra 250ms to ensure all valid gaps have been exposed
await new Promise(resolve => setTimeout(resolve, 250));
// the next gap found is indeed 'goat' -> 'hog', because 'cat' -> 'fox' should have been invalidated
expect(await gapCache.lookupGap('bat', 'zoo')).toEqual(
{ firstKey: 'goat', lastKey: 'hog', weight: 20 });
});
});
it('should expose gaps after at least exposureDelayMs milliseconds', async () => {
gapCache.setGap('bar', 'baz', 10);
expect(await gapCache.lookupGap('ape', 'cat')).toBeNull();
// wait for 50ms which is half of the minimum time to exposure
await new Promise(resolve => setTimeout(resolve, 50));
// the gap should not be exposed yet
expect(await gapCache.lookupGap('ape', 'cat')).toBeNull();
// wait for an extra 250ms (total 300ms): the upper bound for exposure of any
// setGap() call is twice the exposureDelayMs value, so 200ms, wait an extra
// 100ms to cope with scheduling uncertainty and GapSet processing time, after
// which the gap introduced by setGap() should always be exposed.
await new Promise(resolve => setTimeout(resolve, 250));
expect(await gapCache.lookupGap('ape', 'cat')).toEqual(
{ firstKey: 'bar', lastKey: 'baz', weight: 10 });
// check getters
expect(gapCache.maxGaps).toEqual(10);
expect(gapCache.maxGapWeight).toEqual(100);
expect(gapCache.size).toEqual(1);
// check iteration over the exposed gaps
let nGaps = 0;
for (const gap of gapCache) {
expect(gap).toEqual({ firstKey: 'bar', lastKey: 'baz', weight: 10 });
nGaps += 1;
}
expect(nGaps).toEqual(1);
// check toArray()
expect(gapCache.toArray()).toEqual([
{ firstKey: 'bar', lastKey: 'baz', weight: 10 },
]);
});
it('removeOverlappingGaps() should invalidate all overlapping gaps that are already exposed',
async () => {
gapCache.setGap('cat', 'fox', 10);
gapCache.setGap('lion', 'seal', 20);
// wait for 3x100ms to ensure all setGap() calls have been exposed
await new Promise(resolve => setTimeout(resolve, 300));
// expect 0 gap removed because 'hog' is not in any gap
expect(gapCache.removeOverlappingGaps(['hog'])).toEqual(0);
// expect 1 gap removed because 'cat' -> 'fox' should be already exposed
expect(gapCache.removeOverlappingGaps(['dog'])).toEqual(1);
// the gap should have been invalidated permanently
expect(await gapCache.lookupGap('dog', 'fox')).toBeNull();
// the other gap should still be present
expect(await gapCache.lookupGap('rat', 'tiger')).toEqual(
{ firstKey: 'lion', lastKey: 'seal', weight: 20 });
});
it('removeOverlappingGaps() should invalidate all overlapping gaps that are not yet exposed',
async () => {
gapCache.setGap('cat', 'fox', 10);
gapCache.setGap('lion', 'seal', 20);
// make the following calls asynchronous for the sake of the
// test, but not waiting for the exposure delay
await new Promise(resolve => setImmediate(resolve));
// expect 0 gap removed because 'hog' is not in any gap
expect(gapCache.removeOverlappingGaps(['hog'])).toEqual(0);
// expect 0 gap removed because 'cat' -> 'fox' is not exposed yet,
// but internally it should have been removed from the staging or
// frozen gap set
expect(gapCache.removeOverlappingGaps(['dog'])).toEqual(0);
// wait for 3x100ms to ensure all non-invalidated setGap() calls have been exposed
await new Promise(resolve => setTimeout(resolve, 300));
// the gap should have been invalidated permanently
expect(await gapCache.lookupGap('dog', 'fox')).toBeNull();
// the other gap should now be exposed
expect(await gapCache.lookupGap('rat', 'tiger')).toEqual(
{ firstKey: 'lion', lastKey: 'seal', weight: 20 });
});
it('removeOverlappingGaps() should invalidate gaps created later by setGap() but ' +
'within the exposure delay', async () => {
// wait for 80ms (slightly less than exposure delay of 100ms)
// before calling removeOverlappingGaps(), so that the next
// exposure timer kicks in before the call to setGap()
await new Promise(resolve => setTimeout(resolve, 80));
// there is no exposed gap yet, so expect 0 gap removed
expect(gapCache.removeOverlappingGaps(['dog'])).toEqual(0);
// wait for 50ms (half of exposure delay of 100ms) before
// setting a new gap overlapping with 'dog'
await new Promise(resolve => setTimeout(resolve, 50));
gapCache.setGap('cat', 'fox', 10);
// also set a non-overlapping gap to make sure it is not invalidated
gapCache.setGap('goat', 'hog', 20);
// wait an extra 250ms to ensure all valid gaps have been exposed
await new Promise(resolve => setTimeout(resolve, 250));
// the next gap found is indeed 'goat' -> 'hog', because 'cat' -> 'fox' should have been invalidated
expect(await gapCache.lookupGap('bat', 'zoo')).toEqual(
{ firstKey: 'goat', lastKey: 'hog', weight: 20 });
});
it('removeOverlappingGaps() should not invalidate gaps created more than twice ' +
'the exposure delay later', async () => {
// there is no exposed gap yet, so expect 0 gap removed
expect(gapCache.removeOverlappingGaps(['dog'])).toEqual(0);
// wait for 250ms (more than twice the exposure delay of 100ms) before
// setting a new gap overlapping with 'dog'
await new Promise(resolve => setTimeout(resolve, 250));
gapCache.setGap('cat', 'fox', 10);
// also set a non-overlapping gap to make sure it is not invalidated
gapCache.setGap('goat', 'hog', 20);
// wait for an extra 250ms to ensure the new gap is exposed
await new Promise(resolve => setTimeout(resolve, 250));
// should find the inserted gap as it should not have been invalidated
expect(await gapCache.lookupGap('bat', 'zoo')).toEqual(
{ firstKey: 'cat', lastKey: 'fox', weight: 10 });
});
it('exposed gaps should be merged when possible', async () => {
gapCache.setGap('bar', 'baz', 10);
gapCache.setGap('baz', 'qux', 10);
// wait until the merged gap is exposed
await new Promise(resolve => setTimeout(resolve, 300));
expect(await gapCache.lookupGap('ape', 'cat')).toEqual(
{ firstKey: 'bar', lastKey: 'qux', weight: 20 });
});
it('exposed gaps should be split when above maxGapWeight', async () => {
gapCache.setGap('bar', 'baz', gapCache.maxGapWeight - 1);
gapCache.setGap('baz', 'qux', 10);
// wait until the gaps are exposed
await new Promise(resolve => setTimeout(resolve, 300));
expect(await gapCache.lookupGap('cat', 'dog')).toEqual(
{ firstKey: 'baz', lastKey: 'qux', weight: 10 });
});
it('gaps should not be exposed when reaching the maxGaps limit', async () => {
const gapsArray = new Array(gapCache.maxGaps).fill(undefined).map(
(_, i) => {
const firstKey = `0000${i}`.slice(-4);
return {
firstKey,
lastKey: `${firstKey}foo`,
weight: 10,
};
}
);
for (const gap of gapsArray) {
gapCache.setGap(gap.firstKey, gap.lastKey, gap.weight);
}
// wait until the gaps are exposed
await new Promise(resolve => setTimeout(resolve, 300));
expect(gapCache.size).toEqual(gapCache.maxGaps);
gapCache.setGap('noroomforthisgap', 'noroomforthisgapfoo');
// wait until the gaps are exposed
await new Promise(resolve => setTimeout(resolve, 300));
// the number of gaps should still be 'maxGaps'
expect(gapCache.size).toEqual(gapCache.maxGaps);
// the gaps should correspond to the original array
expect(gapCache.toArray()).toEqual(gapsArray);
});
});
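For reference, a minimal usage sketch of the GapCache API exercised by the spec above, with the same constructor arguments and import path as the deleted test (the require('...').default form assumes the module's TypeScript default export; timings are illustrative):

const GapCache = require('../../../../lib/algos/cache/GapCache').default;

async function demo() {
    // exposureDelayMs=100, maxGaps=10, maxGapWeight=100
    const cache = new GapCache(100, 10, 100);
    cache.start();
    // record a key range known to hold no listable entries, with its scan weight
    cache.setGap('bar', 'baz', 10);
    // a gap only becomes visible after at most twice the exposure delay
    await new Promise(resolve => setTimeout(resolve, 300));
    const gap = await cache.lookupGap('ape', 'zoo');
    // gap -> { firstKey: 'bar', lastKey: 'baz', weight: 10 }
    // any write landing inside a staged or exposed gap invalidates it
    cache.removeOverlappingGaps(['bat']);
    cache.stop();
    return gap;
}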

View File

@ -1,878 +0,0 @@
import { OrderedSet } from '@js-sdsl/ordered-set';
import GapSet from '../../../../lib/algos/cache/GapSet';
function genRandomKey(): string {
const CHARS = 'abcdefghijklmnopqrstuvwxyz0123456789';
return new Array(16).fill(undefined).map(
() => CHARS[Math.trunc(Math.random() * CHARS.length)]
).join('');
}
function genRandomUnchainedGaps(nGaps) {
const gapBounds = new Array(nGaps * 2).fill(undefined).map(
() => genRandomKey()
);
gapBounds.sort();
const gapsArray = new Array(nGaps).fill(undefined).map(
(_, i) => ({
firstKey: gapBounds[2 * i],
lastKey: gapBounds[2 * i + 1],
weight: 10,
})
);
return gapsArray;
}
function genRandomChainedGaps(nGaps) {
const gapBounds = new Array(nGaps + 1).fill(undefined).map(
() => genRandomKey()
);
gapBounds.sort();
const gapsArray = new Array(nGaps).fill(undefined).map(
(_, i) => ({
firstKey: gapBounds[i],
lastKey: gapBounds[i + 1],
weight: 10,
})
);
return gapsArray;
}
/**
* Shuffle an array in-place
*
* @param {any[]} array - The array to shuffle
* @return {undefined}
*/
function shuffleArray(array) {
for (let i = array.length - 1; i > 0; i--) {
const randIndex = Math.trunc(Math.random() * (i + 1));
/* eslint-disable no-param-reassign */
const randIndexVal = array[randIndex];
array[randIndex] = array[i];
array[i] = randIndexVal;
/* eslint-enable no-param-reassign */
}
}
describe('GapSet', () => {
const INITIAL_GAPSET = [
{ firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
];
const INITIAL_GAPSET_WITH_CHAIN = [
// single-key gap
{ firstKey: 'ape', lastKey: 'ape', weight: 1 },
// start of chain
{ firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'baz', lastKey: 'qux', weight: 15 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
{ firstKey: 'quz', lastKey: 'rat', weight: 25 },
{ firstKey: 'rat', lastKey: 'yak', weight: 30 },
// end of chain
];
let gapsArray;
let gapSet;
let gapsArrayWithChain;
let gapSetWithChain;
beforeEach(() => {
gapsArray = JSON.parse(
JSON.stringify(INITIAL_GAPSET)
);
gapSet = GapSet.createFromArray(gapsArray, 100);
gapsArrayWithChain = JSON.parse(
JSON.stringify(INITIAL_GAPSET_WITH_CHAIN)
);
gapSetWithChain = GapSet.createFromArray(gapsArrayWithChain, 100);
});
describe('GapSet::size', () => {
it('should return 0 for an empty gap set', () => {
const emptyGapSet = new GapSet(100);
expect(emptyGapSet.size).toEqual(0);
});
it('should return the size of the gap set', () => {
expect(gapSet.size).toEqual(2);
});
it('should reflect the new size after removal of gaps', () => {
gapSet._gaps.eraseElementByKey({ firstKey: 'bar' });
expect(gapSet.size).toEqual(1);
});
});
describe('GapSet::maxWeight', () => {
it('getter', () => {
const emptyGapSet = new GapSet(123);
expect(emptyGapSet.maxWeight).toEqual(123);
});
it('setter', () => {
const emptyGapSet = new GapSet(123);
emptyGapSet.maxWeight = 456;
expect(emptyGapSet.maxWeight).toEqual(456);
});
});
describe('GapSet::setGap()', () => {
it('should start a gap with a single key in empty gap set', () => {
const emptyGapSet = new GapSet(100);
const gap = emptyGapSet.setGap('foo', 'foo', 1);
expect(gap).toEqual({ firstKey: 'foo', lastKey: 'foo', weight: 1 });
expect(emptyGapSet.toArray()).toEqual([
{ firstKey: 'foo', lastKey: 'foo', weight: 1 },
]);
});
it('should start a gap with a single key in non-empty gap set', () => {
const gap = gapSet.setGap('foo', 'foo', 1);
expect(gap).toEqual({ firstKey: 'foo', lastKey: 'foo', weight: 1 });
expect(gapSet.toArray()).toEqual([
{ firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'foo', lastKey: 'foo', weight: 1 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
]);
});
it('should start a gap with multiple keys in empty gap set', () => {
const emptyGapSet = new GapSet(100);
const gap = emptyGapSet.setGap('foo', 'qux', 5);
expect(gap).toEqual({ firstKey: 'foo', lastKey: 'qux', weight: 5 });
expect(emptyGapSet.toArray()).toEqual([
{ firstKey: 'foo', lastKey: 'qux', weight: 5 },
]);
});
it('should return a new object rather than a gap managed by GapSet', () => {
const emptyGapSet = new GapSet(100);
const gap = emptyGapSet.setGap('foo', 'qux', 5);
gap.lastKey = 'quz';
// check that modifying the returned gap doesn't affect the GapSet
expect(emptyGapSet.toArray()).toEqual([
{ firstKey: 'foo', lastKey: 'qux', weight: 5 },
]);
});
it('should return an existing gap that includes the wanted gap', () => {
const gap = gapSet.setGap('bat', 'bay', 5);
expect(gap).toEqual({ firstKey: 'bar', lastKey: 'baz', weight: 10 });
expect(gapSet.toArray()).toEqual(INITIAL_GAPSET);
});
it('should return an existing gap that starts with the wanted gap first key', () => {
const gap = gapSet.setGap('bar', 'bay', 5);
expect(gap).toEqual({ firstKey: 'bar', lastKey: 'baz', weight: 10 });
expect(gapSet.toArray()).toEqual(INITIAL_GAPSET);
});
it('should return an existing gap that ends with the wanted gap last key', () => {
const gap = gapSet.setGap('bat', 'baz', 5);
expect(gap).toEqual({ firstKey: 'bar', lastKey: 'baz', weight: 10 });
expect(gapSet.toArray()).toEqual(INITIAL_GAPSET);
});
it('should return the existing chained gap that starts with the first key', () => {
const gap = gapSetWithChain.setGap('baz', 'quo', 10);
expect(gap).toEqual({ firstKey: 'baz', lastKey: 'qux', weight: 15 });
expect(gapSetWithChain.toArray()).toEqual(INITIAL_GAPSET_WITH_CHAIN);
});
it('should extend a single-key gap with no other gap', () => {
const singleKeyGap = { firstKey: 'foo', lastKey: 'foo', weight: 1 };
const singleKeyGapSet = GapSet.createFromArray([singleKeyGap], 100);
const extendedGap = singleKeyGapSet.setGap('foo', 'qux', 30);
expect(extendedGap).toEqual({ firstKey: 'foo', lastKey: 'qux', weight: 31 });
expect(singleKeyGapSet.toArray()).toEqual([
{ firstKey: 'foo', lastKey: 'qux', weight: 31 },
]);
});
it('should extend a gap with no next gap', () => {
// existing gap: 'qux' -> 'quz'
const extendedGap = gapSet.setGap('qux', 'rat', 25);
expect(extendedGap).toEqual({ firstKey: 'qux', lastKey: 'rat', weight: 25 });
expect(gapSet.toArray()).toEqual([
{ firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'qux', lastKey: 'rat', weight: 25 },
]);
});
it('should extend a gap without overlap with next gap', () => {
// existing gap: 'bar' -> 'baz'
const extendedGap = gapSet.setGap('bar', 'dog', 15);
expect(extendedGap).toEqual({ firstKey: 'bar', lastKey: 'dog', weight: 15 });
expect(gapSet.toArray()).toEqual([
{ firstKey: 'bar', lastKey: 'dog', weight: 15 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
]);
});
it('should extend a gap starting from its last key', () => {
// existing gap: 'qux' -> 'quz'
const extendedGap = gapSet.setGap('quz', 'rat', 5);
expect(extendedGap).toEqual({ firstKey: 'qux', lastKey: 'rat', weight: 25 });
expect(gapSet.toArray()).toEqual([
{ firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'qux', lastKey: 'rat', weight: 25 },
]);
});
it('should merge with next gap with single-key overlap if total weight is ' +
'under maxWeight', () => {
const extendedGap = gapSet.setGap('bar', 'qux', 80);
// updated weight is accurately set as the sum of
// overlapping individual gap weights
expect(extendedGap).toEqual({ firstKey: 'bar', lastKey: 'quz', weight: 80 + 20 });
expect(gapSet.toArray()).toEqual([
{ firstKey: 'bar', lastKey: 'quz', weight: 80 + 20 },
]);
});
it('should chain with next gap with single-key overlap if total weight is ' +
'above maxWeight', () => {
const extendedGap = gapSet.setGap('bar', 'qux', 90);
expect(extendedGap).toEqual({ firstKey: 'qux', lastKey: 'quz', weight: 20 });
expect(gapSet.toArray()).toEqual([
{ firstKey: 'bar', lastKey: 'qux', weight: 90 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
]);
});
it('should merge with both previous and next gap if bounds overlap by a ' +
'single key and total weight is under maxWeight', () => {
const extendedGap = gapSet.setGap('baz', 'qux', 30);
// updated weight is accurately set as the sum of
// overlapping individual gap weights
expect(extendedGap).toEqual({ firstKey: 'bar', lastKey: 'quz', weight: 10 + 30 + 20 });
expect(gapSet.toArray()).toEqual([
{ firstKey: 'bar', lastKey: 'quz', weight: 10 + 30 + 20 },
]);
});
it('should merge with previous gap and chain with next gap if bounds overlap by a ' +
'single key on either side and weight is above maxWeight when merging on right side', () => {
const extendedGap = gapSet.setGap('baz', 'qux', 90);
expect(extendedGap).toEqual({ firstKey: 'qux', lastKey: 'quz', weight: 20 });
expect(gapSet.toArray()).toEqual([
{ firstKey: 'bar', lastKey: 'qux', weight: 100 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
]);
});
it('should chain with previous gap and merge with next gap if bounds overlap by a ' +
'single key on either side and weight is above maxWeight when merging on left side', () => {
// modified version of the common test set with increased weight
// for 'bar' -> 'baz'
const gapSet = GapSet.createFromArray([
{ firstKey: 'bar', lastKey: 'baz', weight: 80 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
], 100);
const extendedGap = gapSet.setGap('baz', 'qux', 70);
expect(extendedGap).toEqual({ firstKey: 'baz', lastKey: 'quz', weight: 90 });
expect(gapSet.toArray()).toEqual([
{ firstKey: 'bar', lastKey: 'baz', weight: 80 },
{ firstKey: 'baz', lastKey: 'quz', weight: 90 },
]);
});
it('should merge with both previous and next gap if left bound overlaps by a ' +
'single key and total weight is under maxWeight', () => {
const extendedGap = gapSet.setGap('baz', 'quxxx', 40);
// updated weight is heuristically set as the sum of the
// previous chained gap's weight and the new weight
// (excluding the overlapping gap on right side)
expect(extendedGap).toEqual({ firstKey: 'bar', lastKey: 'quz', weight: 10 + 40 });
expect(gapSet.toArray()).toEqual([
{ firstKey: 'bar', lastKey: 'quz', weight: 10 + 40 },
]);
});
it('should chain with previous gap and merge with next gap if left bound overlaps by a ' +
'single key and total weight is above maxWeight', () => {
const extendedGap = gapSet.setGap('baz', 'quxxx', 95);
// updated weight is accurately set as the sum of
// overlapping individual gap weights
expect(extendedGap).toEqual({ firstKey: 'baz', lastKey: 'quz', weight: 95 });
expect(gapSet.toArray()).toEqual([
{ firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'baz', lastKey: 'quz', weight: 95 },
]);
});
it('should extend a gap with overlap with next gap and large weight', () => {
const extendedGap = gapSet.setGap('bar', 'quxxx', 80);
// updated weight is heuristically chosen to be the new
// gap weight which is larger than the sum of the existing merged
// gap weights
expect(extendedGap).toEqual({ firstKey: 'bar', lastKey: 'quz', weight: 80 });
expect(gapSet.toArray()).toEqual([
{ firstKey: 'bar', lastKey: 'quz', weight: 80 },
]);
});
it('should extend a gap with overlap with next gap and small weight', () => {
const extendedGap = gapSet.setGap('bar', 'quxxx', 11);
// updated weight is heuristically chosen to be the sum of the existing merged
// gap weights which is larger than the new gap weight
expect(extendedGap).toEqual({ firstKey: 'bar', lastKey: 'quz', weight: 10 + 20 });
expect(gapSet.toArray()).toEqual([
{ firstKey: 'bar', lastKey: 'quz', weight: 10 + 20 },
]);
});
it('should extend a gap with overlap beyond last key of next gap', () => {
const extendedGap = gapSet.setGap('bar', 'rat', 80);
// updated weight is the new gap weight
expect(extendedGap).toEqual({ firstKey: 'bar', lastKey: 'rat', weight: 80 });
expect(gapSet.toArray()).toEqual([
{ firstKey: 'bar', lastKey: 'rat', weight: 80 },
]);
});
it('should extend a gap with overlap beyond last key of next gap with a chained gap ' +
'if above maxWeight', () => {
// gapSet was initialized with maxWeight=100
const extendedGap = gapSet.setGap('bar', 'rat', 105);
// returned new gap is the right-side chained gap
// updated weight is the new gap weight minus the left-side chained gap's weight
expect(extendedGap).toEqual({ firstKey: 'baz', lastKey: 'rat', weight: 105 - 10 });
expect(gapSet.toArray()).toEqual([
{ firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'baz', lastKey: 'rat', weight: 105 - 10 },
]);
});
it('should extend a single-key gap with overlap on chained gaps', () => {
// existing gap: 'ape' -> 'ape' (weight=1)
const extendedGap = gapSetWithChain.setGap('ape', 'dog', 30);
// updated weight is heuristically set to the new gap's
// weight, which is larger than the overlapping gaps'
// cumulative weight (10+15=25)
expect(extendedGap).toEqual({ firstKey: 'ape', lastKey: 'qux', weight: 30 });
expect(gapSetWithChain.toArray()).toEqual([
{ firstKey: 'ape', lastKey: 'qux', weight: 30 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
{ firstKey: 'quz', lastKey: 'rat', weight: 25 },
{ firstKey: 'rat', lastKey: 'yak', weight: 30 },
]);
});
it('should merge and extend a gap, updating its weight, with overlap not past the end of chained gaps',
() => {
const extendedGap = gapSetWithChain.setGap('baz', 'sea', 80);
expect(extendedGap).toEqual({ firstKey: 'baz', lastKey: 'yak', weight: 90 });
expect(gapSetWithChain.toArray()).toEqual([
{ firstKey: 'ape', lastKey: 'ape', weight: 1 },
{ firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'baz', lastKey: 'yak', weight: 90 },
]);
});
it('should merge and extend a gap, updating its weight, with overlap past the end of chained gaps',
() => {
const extendedGap = gapSetWithChain.setGap('baz', 'zoo', 95);
expect(extendedGap).toEqual({ firstKey: 'baz', lastKey: 'zoo', weight: 95 });
expect(gapSetWithChain.toArray()).toEqual([
{ firstKey: 'ape', lastKey: 'ape', weight: 1 },
{ firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'baz', lastKey: 'zoo', weight: 95 },
]);
});
it('should extend a gap, updating its weight, with overlap past the end of chained gaps and ' +
'above maxWeight', () => {
const extendedGap = gapSetWithChain.setGap('baz', 'zoo', 105);
// updated weight is the new gap weight minus the left-side chained gap's weight
expect(extendedGap).toEqual({ firstKey: 'qux', lastKey: 'zoo', weight: 105 - 15 });
expect(gapSetWithChain.toArray()).toEqual([
{ firstKey: 'ape', lastKey: 'ape', weight: 1 },
{ firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'baz', lastKey: 'qux', weight: 15 },
{ firstKey: 'qux', lastKey: 'zoo', weight: 105 - 15 },
]);
});
it('should return existing chained gap with overlap above maxWeight', () => {
const chainedGapsArray = [
{ firstKey: 'ant', lastKey: 'cat', weight: 90 },
{ firstKey: 'cat', lastKey: 'fox', weight: 40 },
];
const chainedGapsSet = GapSet.createFromArray(chainedGapsArray, 100);
const extendedGap = chainedGapsSet.setGap('bat', 'dog', 105);
expect(extendedGap).toEqual({ firstKey: 'cat', lastKey: 'fox', weight: 40 });
expect(chainedGapsSet.toArray()).toEqual([
{ firstKey: 'ant', lastKey: 'cat', weight: 90 },
{ firstKey: 'cat', lastKey: 'fox', weight: 40 },
]);
});
it('should merge but not extend nor update weight with overlap on chained gaps', () => {
// existing chained gap: 'baz' -> 'qux'
const extendedGap = gapSetWithChain.setGap('baz', 'quxxx', 25);
// updated weight is the sum of the two merged gap's weights
expect(extendedGap).toEqual({ firstKey: 'baz', lastKey: 'quz', weight: 15 + 20 });
expect(gapSetWithChain.toArray()).toEqual([
{ firstKey: 'ape', lastKey: 'ape', weight: 1 },
{ firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'baz', lastKey: 'quz', weight: 15 + 20 },
{ firstKey: 'quz', lastKey: 'rat', weight: 25 },
{ firstKey: 'rat', lastKey: 'yak', weight: 30 },
]);
});
});
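// A minimal sketch of the merge-versus-chain rule that the expectations
// above encode. It is inferred from the tests, not lifted from the GapSet
// implementation; 'mergeOrChain' is a hypothetical helper. Two gaps that
// share a boundary key are merged into one when their combined weight fits
// within maxWeight, and are kept chained (still sharing that key) otherwise.
function mergeOrChain(leftGap, rightGap, maxWeight) {
    const combinedWeight = leftGap.weight + rightGap.weight;
    if (combinedWeight <= maxWeight) {
        // merge: a single gap spanning both ranges, weights summed
        return [{
            firstKey: leftGap.firstKey,
            lastKey: rightGap.lastKey,
            weight: combinedWeight,
        }];
    }
    // chain: keep two gaps sharing the boundary key leftGap.lastKey
    return [leftGap, rightGap];
}
// e.g. with maxWeight=100, a 'bar' -> 'qux' gap of weight 80 merges with
// 'qux' -> 'quz' (weight 20), while a weight of 90 chains instead, matching
// the setGap('bar', 'qux', ...) tests above.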
describe('GapSet::removeOverlappingGaps()', () => {
describe('with zero keys as parameter', () => {
it('passed in an array: should not remove any gap', () => {
const nRemoved = gapSet.removeOverlappingGaps([]);
expect(nRemoved).toEqual(0);
expect(gapSet.toArray()).toEqual(INITIAL_GAPSET);
});
it('passed in an OrderedSet: should not remove any gap', () => {
const nRemoved = gapSet.removeOverlappingGaps(new OrderedSet());
expect(nRemoved).toEqual(0);
expect(gapSet.toArray()).toEqual(INITIAL_GAPSET);
});
});
describe('with an array of one key as parameter', () => {
it('should not remove any gap if no overlap', () => {
const nRemoved = gapSet.removeOverlappingGaps(['rat']);
expect(nRemoved).toEqual(0);
expect(gapSet.toArray()).toEqual(INITIAL_GAPSET);
});
it('should remove a single gap if it overlaps', () => {
const nRemoved = gapSet.removeOverlappingGaps(['bat']);
expect(nRemoved).toEqual(1);
expect(gapSet.toArray()).toEqual([
// removed: { firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
]);
});
it('should remove a single gap if it overlaps with first key of first gap', () => {
const nRemoved = gapSet.removeOverlappingGaps(['bar']);
expect(nRemoved).toEqual(1);
expect(gapSet.toArray()).toEqual([
// removed: { firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
]);
});
it('should remove a single gap if it overlaps with first key of non-first gap', () => {
const nRemoved = gapSet.removeOverlappingGaps(['qux']);
expect(nRemoved).toEqual(1);
expect(gapSet.toArray()).toEqual([
{ firstKey: 'bar', lastKey: 'baz', weight: 10 },
// removed: { firstKey: 'qux', lastKey: 'quz', weight: 20 },
]);
});
it('should remove a single gap if it overlaps with last key', () => {
const nRemoved = gapSet.removeOverlappingGaps(['quz']);
expect(nRemoved).toEqual(1);
expect(gapSet.toArray()).toEqual([
{ firstKey: 'bar', lastKey: 'baz', weight: 10 },
// removed: { firstKey: 'qux', lastKey: 'quz', weight: 20 },
]);
});
it('should remove a single gap in chain if it overlaps with one chained gap', () => {
const nRemoved = gapSetWithChain.removeOverlappingGaps(['dog']);
expect(nRemoved).toEqual(1);
expect(gapSetWithChain.toArray()).toEqual([
{ firstKey: 'ape', lastKey: 'ape', weight: 1 },
{ firstKey: 'bar', lastKey: 'baz', weight: 10 },
// removed: { firstKey: 'baz', lastKey: 'qux', weight: 15 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
{ firstKey: 'quz', lastKey: 'rat', weight: 25 },
{ firstKey: 'rat', lastKey: 'yak', weight: 30 },
]);
});
it('should remove two gaps in chain if it overlaps with two chained gaps', () => {
const nRemoved = gapSetWithChain.removeOverlappingGaps(['qux']);
expect(nRemoved).toEqual(2);
expect(gapSetWithChain.toArray()).toEqual([
{ firstKey: 'ape', lastKey: 'ape', weight: 1 },
{ firstKey: 'bar', lastKey: 'baz', weight: 10 },
// removed: { firstKey: 'baz', lastKey: 'qux', weight: 15 },
// removed: { firstKey: 'qux', lastKey: 'quz', weight: 20 },
{ firstKey: 'quz', lastKey: 'rat', weight: 25 },
{ firstKey: 'rat', lastKey: 'yak', weight: 30 },
]);
});
});
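// Sketch of the overlap predicate the single-key cases above rely on
// (hypothetical helper, inferred from the expectations): a key overlaps a
// gap when it falls within the gap's inclusive bounds, and multi-key calls
// apply the predicate independently to each key.
const keyOverlapsGap = (key, gap) => gap.firstKey <= key && key <= gap.lastKey;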
describe('with an array of two keys as parameter', () => {
it('should not remove any gap if no overlap', () => {
const nRemoved = gapSet.removeOverlappingGaps(['rat', 'rat\0v100']);
expect(nRemoved).toEqual(0);
expect(gapSet.toArray()).toEqual(INITIAL_GAPSET);
});
it('should remove a single gap if both keys overlap', () => {
const nRemoved = gapSet.removeOverlappingGaps(['bat', 'bat\0v100']);
expect(nRemoved).toEqual(1);
expect(gapSet.toArray()).toEqual([
// removed: { firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
]);
});
it('should remove a single gap if min key overlaps with first key of first gap', () => {
const nRemoved = gapSet.removeOverlappingGaps(['bar\0v100', 'bar']);
expect(nRemoved).toEqual(1);
expect(gapSet.toArray()).toEqual([
// removed: { firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
]);
});
it('should remove a single gap if max key overlaps with first key of first gap', () => {
const nRemoved = gapSet.removeOverlappingGaps(['ape', 'bar']);
expect(nRemoved).toEqual(1);
expect(gapSet.toArray()).toEqual([
// removed: { firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
]);
});
it('should not remove any gap if both keys straddle an existing gap without overlap',
() => {
const nRemoved = gapSet.removeOverlappingGaps(['cow', 'ape']);
expect(nRemoved).toEqual(0);
expect(gapSet.toArray()).toEqual([
{ firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
]);
});
it('should remove the last two gaps in chained gaps if the bounds of the last gap match ' +
'the two keys', () => {
const nRemoved = gapSetWithChain.removeOverlappingGaps(['yak', 'rat']);
expect(nRemoved).toEqual(2);
expect(gapSetWithChain.toArray()).toEqual([
{ firstKey: 'ape', lastKey: 'ape', weight: 1 },
{ firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'baz', lastKey: 'qux', weight: 15 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
// removed: { firstKey: 'quz', lastKey: 'rat', weight: 25 },
// removed: { firstKey: 'rat', lastKey: 'yak', weight: 30 },
]);
});
it('should remove the first and last gaps in chained gaps if their bounds match ' +
'the two keys', () => {
const nRemoved = gapSetWithChain.removeOverlappingGaps(['yak', 'bar']);
expect(nRemoved).toEqual(2);
expect(gapSetWithChain.toArray()).toEqual([
{ firstKey: 'ape', lastKey: 'ape', weight: 1 },
// removed: { firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'baz', lastKey: 'qux', weight: 15 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
{ firstKey: 'quz', lastKey: 'rat', weight: 25 },
// removed: { firstKey: 'rat', lastKey: 'yak', weight: 30 },
]);
});
});
describe('with an array of three keys as parameter', () => {
it('should remove a single gap if only median key overlaps with gap', () => {
const nRemoved = gapSet.removeOverlappingGaps(['ape', 'bat', 'cow']);
expect(nRemoved).toEqual(1);
expect(gapSet.toArray()).toEqual([
// removed: { firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
]);
});
it('should remove a single-key gap and two contiguous chained gaps each overlapping ' +
'with one key', () => {
const nRemoved = gapSetWithChain.removeOverlappingGaps(['ape', 'bat', 'cow']);
expect(nRemoved).toEqual(3);
expect(gapSetWithChain.toArray()).toEqual([
// removed: { firstKey: 'ape', lastKey: 'ape', weight: 1 },
// removed: { firstKey: 'bar', lastKey: 'baz', weight: 10 },
// removed: { firstKey: 'baz', lastKey: 'qux', weight: 15 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
{ firstKey: 'quz', lastKey: 'rat', weight: 25 },
{ firstKey: 'rat', lastKey: 'yak', weight: 30 },
]);
});
it('should not remove any gap if all keys are intermingled but do not overlap', () => {
const nRemoved = gapSet.removeOverlappingGaps(['ape', 'rat', 'cow']);
expect(nRemoved).toEqual(0);
expect(gapSet.toArray()).toEqual([
{ firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
]);
});
it('should remove three discontiguous chained gaps each overlapping with one key', () => {
const nRemoved = gapSetWithChain.removeOverlappingGaps(['bat', 'quxxx', 'tiger']);
expect(nRemoved).toEqual(3);
expect(gapSetWithChain.toArray()).toEqual([
{ firstKey: 'ape', lastKey: 'ape', weight: 1 },
// removed: { firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'baz', lastKey: 'qux', weight: 15 },
// removed: { firstKey: 'qux', lastKey: 'quz', weight: 20 },
{ firstKey: 'quz', lastKey: 'rat', weight: 25 },
// removed: { firstKey: 'rat', lastKey: 'yak', weight: 30 },
]);
});
});
describe('with an OrderedSet of three keys as parameter', () => {
it('should remove a single gap if only median key overlaps with gap', () => {
const nRemoved = gapSet.removeOverlappingGaps(
new OrderedSet(['ape', 'bat', 'cow']));
expect(nRemoved).toEqual(1);
expect(gapSet.toArray()).toEqual([
// removed: { firstKey: 'bar', lastKey: 'baz', weight: 10 },
{ firstKey: 'qux', lastKey: 'quz', weight: 20 },
]);
});
});
// this helper checks that:
// - the gaps not overlapping with any key are still present in newGapsArray
// - and the gaps overlapping with at least one key are no longer present in newGapsArray
// NOTE: It uses a sorted list of keys for efficiency, otherwise it would require
// O(n^2) compute time which would be expensive with 50K keys.
function checkOverlapInvariant(sortedKeys, oldGapsArray, newGapsArray) {
let oldGapIdx = 0;
let newGapIdx = 0;
for (const key of sortedKeys) {
// for all gaps not overlapping with any key in 'sortedKeys',
// check that they are still in 'newGapsArray'
while (oldGapIdx < oldGapsArray.length &&
oldGapsArray[oldGapIdx].lastKey < key) {
expect(oldGapsArray[oldGapIdx]).toEqual(newGapsArray[newGapIdx]);
oldGapIdx += 1;
newGapIdx += 1;
}
// for the gap(s) overlapping with the current key,
// check that they have been removed from 'newGapsArray'
while (oldGapIdx < oldGapsArray.length &&
oldGapsArray[oldGapIdx].firstKey <= key) {
if (newGapIdx < newGapsArray.length) {
expect(oldGapsArray[oldGapIdx]).not.toEqual(newGapsArray[newGapIdx]);
}
++oldGapIdx;
}
}
// check the range after the last key in 'sortedKeys'
while (oldGapIdx < oldGapsArray.length) {
expect(oldGapsArray[oldGapIdx]).toEqual(newGapsArray[newGapIdx]);
oldGapIdx += 1;
newGapIdx += 1;
}
// check that no extra range is in newGapsArray
expect(newGapIdx).toEqual(newGapsArray.length);
}
[false, true].forEach(chained => {
describe(`with 10K random ${chained ? 'chained' : 'unchained'} gaps`, () => {
let largeGapsArray;
let largeGapSet;
beforeEach(() => {
largeGapsArray = chained ?
genRandomChainedGaps(10000) :
genRandomUnchainedGaps(10000);
largeGapSet = GapSet.createFromArray(largeGapsArray, 100);
});
[{
desc: 'equal to their first key',
getGapKey: gap => gap.firstKey,
}, {
desc: 'equal to their last key',
getGapKey: gap => gap.lastKey,
}, {
desc: 'neither their first nor last key',
getGapKey: gap => `${gap.firstKey}/foo`,
}].forEach(testCase => {
it(`should remove the overlapping gap(s) with one key ${testCase.desc}`, () => {
const gapIndex = 5000;
const gap = largeGapsArray[gapIndex];
const overlappingKey = testCase.getGapKey(gap);
const nRemoved = largeGapSet.removeOverlappingGaps([overlappingKey]);
let firstRemovedGapIndex, lastRemovedGapIndex;
if (chained && overlappingKey === gap.firstKey) {
expect(nRemoved).toEqual(2);
[firstRemovedGapIndex, lastRemovedGapIndex] = [4999, 5000];
} else if (chained && overlappingKey === gap.lastKey) {
expect(nRemoved).toEqual(2);
[firstRemovedGapIndex, lastRemovedGapIndex] = [5000, 5001];
} else {
expect(nRemoved).toEqual(1);
[firstRemovedGapIndex, lastRemovedGapIndex] = [5000, 5000];
}
const expectedGaps = [
...largeGapsArray.slice(0, firstRemovedGapIndex),
...largeGapsArray.slice(lastRemovedGapIndex + 1)
];
const newGaps = largeGapSet.toArray();
expect(newGaps).toEqual(expectedGaps);
});
it(`should remove all gaps when they all overlap with one key ${testCase.desc}`,
() => {
// simulate a scenario of 200 batches of 50 operations, with random
// keys scattered across all gaps, each key overlapping a distinct gap
// (supposedly a worst-case performance scenario for such batch sizes)
const overlappingKeys = largeGapsArray.map(testCase.getGapKey);
shuffleArray(overlappingKeys);
for (let i = 0; i < overlappingKeys.length; i += 50) {
const nRemoved = largeGapSet.removeOverlappingGaps(
overlappingKeys.slice(i, i + 50));
// with unchained gaps, we expect each batch to have removed
// exactly 50 gaps (one per key in the batch).
if (!chained) {
expect(nRemoved).toEqual(50);
}
}
const newGaps = largeGapSet.toArray();
expect(newGaps).toEqual([]);
});
});
it('should remove only and all overlapping gaps with 50K randomized keys', () => {
const randomizedKeys = new Array(50000).fill(undefined).map(
() => genRandomKey()
);
for (let i = 0; i < randomizedKeys.length; i += 50) {
largeGapSet.removeOverlappingGaps(
randomizedKeys.slice(i, i + 50));
}
const newGaps = largeGapSet.toArray();
randomizedKeys.sort();
checkOverlapInvariant(randomizedKeys, largeGapsArray, newGaps);
});
});
});
});
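// Usage pattern exercised by the randomized tests above: a caller that
// invalidates cached gaps after write operations can feed its keys to
// removeOverlappingGaps() in fixed-size batches. This is an illustrative
// sketch, not library code; the batch size of 50 mirrors the tests.
function invalidateGapsInBatches(gapSet, keys, batchSize = 50) {
    let removed = 0;
    for (let i = 0; i < keys.length; i += batchSize) {
        removed += gapSet.removeOverlappingGaps(keys.slice(i, i + batchSize));
    }
    return removed;
}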
describe('GapSet::_coalesceGapChain()', () => {
afterEach(() => {
// check that the gap sets were not modified by the operation
expect(gapSet.toArray()).toEqual(INITIAL_GAPSET);
expect(gapSetWithChain.toArray()).toEqual(INITIAL_GAPSET_WITH_CHAIN);
});
it('should not coalesce if gaps are not chained', async () => {
const gap = { firstKey: 'bar', lastKey: 'baz', weight: 10 };
const coalescedGap = await gapSet._coalesceGapChain(gap);
expect(coalescedGap).toEqual({ firstKey: 'bar', lastKey: 'baz', weight: 10 });
});
it('should coalesce one chained gap', async () => {
const gap = { firstKey: 'quz', lastKey: 'rat', weight: 25 };
const coalescedGap = await gapSetWithChain._coalesceGapChain(gap);
expect(coalescedGap).toEqual({ firstKey: 'quz', lastKey: 'yak', weight: 55 });
});
it('should coalesce a chain of five gaps', async () => {
const gap = { firstKey: 'bar', lastKey: 'baz', weight: 10 };
const coalescedGap = await gapSetWithChain._coalesceGapChain(gap);
expect(coalescedGap).toEqual({ firstKey: 'bar', lastKey: 'yak', weight: 100 });
});
it('should coalesce a chain of one thousand gaps', async () => {
const getKey = i => `000${i}`.slice(-4);
const thousandGapsArray = new Array(1000).fill(undefined).map(
(_, i) => ({ firstKey: getKey(i), lastKey: getKey(i + 1), weight: 10 })
);
const thousandGapsSet = GapSet.createFromArray(thousandGapsArray, 100);
const gap = { firstKey: '0000', lastKey: '0001', weight: 10 };
const coalescedGap = await thousandGapsSet._coalesceGapChain(gap);
expect(coalescedGap).toEqual({ firstKey: '0000', lastKey: '1000', weight: 10000 });
});
it('should coalesce a single-key gap', async () => {
const singleKeyGapSet = GapSet.createFromArray([
{ firstKey: '0000', lastKey: '0000', weight: 1 },
], 100);
const gap = { firstKey: '0000', lastKey: '0000', weight: 1 };
const coalescedGap = await singleKeyGapSet._coalesceGapChain(gap);
expect(coalescedGap).toEqual({ firstKey: '0000', lastKey: '0000', weight: 1 });
});
it('should coalesce a chain of two gaps ending with a single-key gap', async () => {
const singleKeyGapSet = GapSet.createFromArray([
{ firstKey: '0000', lastKey: '0003', weight: 9 },
{ firstKey: '0003', lastKey: '0003', weight: 1 },
], 100);
const gap = { firstKey: '0000', lastKey: '0003', weight: 9 };
const coalescedGap = await singleKeyGapSet._coalesceGapChain(gap);
expect(coalescedGap).toEqual({ firstKey: '0000', lastKey: '0003', weight: 9 });
});
});
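// Stand-alone sketch of what coalescing means in the tests above,
// reconstructed from the expectations (the real _coalesceGapChain() is
// async and walks the set's internal storage): follow gaps that share a
// boundary key, accumulating weight, and stop at a single-key gap, which
// ends the chain without extending the range or adding weight. Note the
// coalesced weight may exceed maxWeight, as in the thousand-gap case.
function coalesceChainSketch(gaps, startIndex) {
    let { firstKey, lastKey, weight } = gaps[startIndex];
    for (let i = startIndex + 1; i < gaps.length; i++) {
        const next = gaps[i];
        if (next.firstKey !== lastKey || next.firstKey === next.lastKey) {
            break; // not chained, or a single-key gap ending the chain
        }
        lastKey = next.lastKey;
        weight += next.weight;
    }
    return { firstKey, lastKey, weight };
}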
describe('GapSet::lookupGap()', () => {
afterEach(() => {
// check that the gap sets were not modified by the operation
expect(gapSet.toArray()).toEqual(INITIAL_GAPSET);
expect(gapSetWithChain.toArray()).toEqual(INITIAL_GAPSET_WITH_CHAIN);
});
it('should return null with empty cache', async () => {
const emptyGapSet = new GapSet(100);
const gap = await emptyGapSet.lookupGap('cat', 'dog');
expect(gap).toBeNull();
});
it('should return null if no gap overlaps [minKey, maxKey]', async () => {
const gap = await gapSet.lookupGap('cat', 'dog');
expect(gap).toBeNull();
});
it('should return the first gap that overlaps if all gaps overlap', async () => {
const gap = await gapSet.lookupGap('ape', 'zoo');
expect(gap).toEqual({ firstKey: 'bar', lastKey: 'baz', weight: 10 });
});
it('should return an existing gap that contains [minKey, maxKey]', async () => {
const gap1 = await gapSet.lookupGap('bat', 'bay');
expect(gap1).toEqual({ firstKey: 'bar', lastKey: 'baz', weight: 10 });
const gap2 = await gapSet.lookupGap('quxxx', 'quy');
expect(gap2).toEqual({ firstKey: 'qux', lastKey: 'quz', weight: 20 });
});
it('should return an existing gap that overlaps with minKey but not maxKey', async () => {
const gap = await gapSet.lookupGap('ape', 'bat');
expect(gap).toEqual({ firstKey: 'bar', lastKey: 'baz', weight: 10 });
});
it('should return an existing gap that overlaps only with minKey when no maxKey is provided',
async () => {
const gap = await gapSet.lookupGap('ape');
expect(gap).toEqual({ firstKey: 'bar', lastKey: 'baz', weight: 10 });
});
it('should return an existing gap that overlaps with maxKey but not minKey', async () => {
const gap = await gapSet.lookupGap('bat', 'cat');
expect(gap).toEqual({ firstKey: 'bar', lastKey: 'baz', weight: 10 });
});
it('should return an existing gap that is contained in [minKey, maxKey] strictly', async () => {
const gap = await gapSet.lookupGap('dog', 'rat');
expect(gap).toEqual({ firstKey: 'qux', lastKey: 'quz', weight: 20 });
});
it('should return a coalesced gap from chained gaps that fully overlaps [minKey, maxKey]', async () => {
const gap = await gapSetWithChain.lookupGap('bat', 'zoo');
expect(gap).toEqual({ firstKey: 'bar', lastKey: 'yak', weight: 100 });
});
it('should return a coalesced gap from chained gaps that contain [minKey, maxKey] strictly',
async () => {
const gap = await gapSetWithChain.lookupGap('bog', 'dog');
expect(gap).toEqual({ firstKey: 'baz', lastKey: 'yak', weight: 90 });
});
});
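// Hedged usage sketch for lookupGap() (illustrative caller, not part of
// the library): a listing algorithm about to scan [minKey, maxKey] can
// consult the cache and resume just after a known gap instead of reading
// through it.
async function resumeKeyAfterGap(gapSet, minKey, maxKey) {
    const gap = await gapSet.lookupGap(minKey, maxKey);
    if (!gap) {
        return minKey; // no cached gap overlaps: scan from minKey
    }
    return gap.lastKey; // a (possibly coalesced) gap overlaps: skip to its end
}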
});

View File

@ -7,17 +7,23 @@ const DelimiterMaster =
require('../../../../lib/algos/list/delimiterMaster').DelimiterMaster; require('../../../../lib/algos/list/delimiterMaster').DelimiterMaster;
const Werelogs = require('werelogs').Logger; const Werelogs = require('werelogs').Logger;
const logger = new Werelogs('listTest'); const logger = new Werelogs('listTest');
const performListing = require('../../../utils/performListing');
const zpad = require('../../helpers').zpad; const zpad = require('../../helpers').zpad;
const { inc } = require('../../../../lib/algos/list/tools'); const { inc } = require('../../../../lib/algos/list/tools');
const VSConst = require('../../../../lib/versioning/constants').VersioningConstants; const VSConst = require('../../../../lib/versioning/constants').VersioningConstants;
const { DbPrefixes } = VSConst; const { DbPrefixes } = VSConst;
class Test { class Test {
constructor(name, input, genMDParams, output) { constructor(name, input, genMDParams, output, filter) {
this.name = name; this.name = name;
this.input = input; this.input = input;
this.genMDParams = genMDParams; this.genMDParams = genMDParams;
this.output = output; this.output = output;
this.filter = filter || this._defaultFilter;
}
_defaultFilter() {
return true;
} }
} }
@ -27,7 +33,7 @@ const valueDeleteMarker = '{"hello":"world","isDeleteMarker":"true"}';
const data = [ const data = [
{ key: 'Pâtisserie=中文-español-English', value }, { key: 'Pâtisserie=中文-español-English', value },
{ key: 'notes/spring/1.txt', value }, { key: 'notes/spring/1.txt', value },
{ key: 'notes/spring/4.txt', value }, { key: 'notes/spring/2.txt', value },
{ key: 'notes/spring/march/1.txt', value }, { key: 'notes/spring/march/1.txt', value },
{ key: 'notes/summer/1.txt', value }, { key: 'notes/summer/1.txt', value },
{ key: 'notes/summer/2.txt', value }, { key: 'notes/summer/2.txt', value },
@ -50,9 +56,6 @@ const dataVersioned = [
{ key: 'notes/spring/2.txt\0foo', value }, { key: 'notes/spring/2.txt\0foo', value },
{ key: 'notes/spring/3.txt', value: valueDeleteMarker }, { key: 'notes/spring/3.txt', value: valueDeleteMarker },
{ key: 'notes/spring/3.txt\0foo', value }, { key: 'notes/spring/3.txt\0foo', value },
{ key: 'notes/spring/4.txt', value: valuePHD },
{ key: 'notes/spring/4.txt\0bar', value },
{ key: 'notes/spring/4.txt\0foo', value },
{ key: 'notes/spring/march/1.txt', value }, { key: 'notes/spring/march/1.txt', value },
{ key: 'notes/spring/march/1.txt\0bar', value }, { key: 'notes/spring/march/1.txt\0bar', value },
{ key: 'notes/spring/march/1.txt\0foo', value }, { key: 'notes/spring/march/1.txt\0foo', value },
@ -75,8 +78,15 @@ const dataVersioned = [
{ key: 'notes/yore.rs', value }, { key: 'notes/yore.rs', value },
{ key: 'notes/zaphod/Beeblebrox.txt', value }, { key: 'notes/zaphod/Beeblebrox.txt', value },
]; ];
const nonAlphabeticalData = [
{ key: 'zzz', value },
{ key: 'aaa', value },
];
const receivedData = data.map(item => ({ key: item.key, value: item.value })); const receivedData = data.map(item => ({ key: item.key, value: item.value }));
const receivedNonAlphaData = nonAlphabeticalData.map(
item => ({ key: item.key, value: item.value }),
);
const tests = [ const tests = [
new Test('all elements', {}, { new Test('all elements', {}, {
@ -114,7 +124,7 @@ const tests = [
Delimiter: undefined, Delimiter: undefined,
IsTruncated: false, IsTruncated: false,
NextMarker: undefined, NextMarker: undefined,
}), }, (e, input) => e.key > input.marker),
new Test('with bad marker', { new Test('with bad marker', {
marker: 'zzzz', marker: 'zzzz',
delimiter: '/', delimiter: '/',
@ -132,7 +142,7 @@ const tests = [
Delimiter: '/', Delimiter: '/',
IsTruncated: false, IsTruncated: false,
NextMarker: undefined, NextMarker: undefined,
}), }, (e, input) => e.key > input.marker),
new Test('with maxKeys', { new Test('with maxKeys', {
maxKeys: 3, maxKeys: 3,
}, { }, {
@ -209,12 +219,12 @@ const tests = [
marker: 'notes/summer0', marker: 'notes/summer0',
}, { }, {
v0: { v0: {
gt: 'notes/summer0', gt: `notes/summer${inc('/')}`,
lt: 'notes/summer0', lt: `notes/summer${inc('/')}`,
}, },
v1: { v1: {
gt: `${DbPrefixes.Master}notes/summer0`, gt: `${DbPrefixes.Master}notes/summer${inc('/')}`,
lt: `${DbPrefixes.Master}notes/summer0`, lt: `${DbPrefixes.Master}notes/summer${inc('/')}`,
}, },
}, { }, {
Contents: [], Contents: [],
@ -222,18 +232,18 @@ const tests = [
Delimiter: '/', Delimiter: '/',
IsTruncated: false, IsTruncated: false,
NextMarker: undefined, NextMarker: undefined,
}), }, (e, input) => e.key > input.marker),
new Test('delimiter and prefix (related to #147)', { new Test('delimiter and prefix (related to #147)', {
delimiter: '/', delimiter: '/',
prefix: 'notes/', prefix: 'notes/',
}, { }, {
v0: { v0: {
gte: 'notes/', gte: 'notes/',
lt: 'notes0', lt: `notes${inc('/')}`,
}, },
v1: { v1: {
gte: `${DbPrefixes.Master}notes/`, gte: `${DbPrefixes.Master}notes/`,
lt: `${DbPrefixes.Master}notes0`, lt: `${DbPrefixes.Master}notes${inc('/')}`,
}, },
}, { }, {
Contents: [ Contents: [
@ -256,11 +266,11 @@ const tests = [
}, { }, {
v0: { v0: {
gt: 'notes/year.txt', gt: 'notes/year.txt',
lt: 'notes0', lt: `notes${inc('/')}`,
}, },
v1: { v1: {
gt: `${DbPrefixes.Master}notes/year.txt`, gt: `${DbPrefixes.Master}notes/year.txt`,
lt: `${DbPrefixes.Master}notes0`, lt: `${DbPrefixes.Master}notes${inc('/')}`,
}, },
}, { }, {
Contents: [ Contents: [
@ -272,8 +282,8 @@ const tests = [
Delimiter: '/', Delimiter: '/',
IsTruncated: false, IsTruncated: false,
NextMarker: undefined, NextMarker: undefined,
}), }, (e, input) => e.key > input.marker),
new Test('all parameters 1/5', { new Test('all parameters 1/3', {
delimiter: '/', delimiter: '/',
prefix: 'notes/', prefix: 'notes/',
marker: 'notes/', marker: 'notes/',
@ -281,11 +291,11 @@ const tests = [
}, { }, {
v0: { v0: {
gt: 'notes/', gt: 'notes/',
lt: 'notes0', lt: `notes${inc('/')}`,
}, },
v1: { v1: {
gt: `${DbPrefixes.Master}notes/`, gt: `${DbPrefixes.Master}notes/`,
lt: `${DbPrefixes.Master}notes0`, lt: `${DbPrefixes.Master}notes${inc('/')}`,
}, },
}, { }, {
Contents: [], Contents: [],
@ -293,21 +303,21 @@ const tests = [
Delimiter: '/', Delimiter: '/',
IsTruncated: true, IsTruncated: true,
NextMarker: 'notes/spring/', NextMarker: 'notes/spring/',
}), }, (e, input) => e.key > input.marker),
new Test('all parameters 2/5', { new Test('all parameters 2/3', {
delimiter: '/', delimiter: '/',
prefix: 'notes/', prefix: 'notes/', // prefix
marker: 'notes/spring/', marker: 'notes/spring/',
maxKeys: 1, maxKeys: 1,
}, { }, {
v0: { v0: {
gte: 'notes/spring0', gt: 'notes/spring/',
lt: 'notes0', lt: `notes${inc('/')}`,
}, },
v1: { v1: {
gte: `${DbPrefixes.Master}notes/spring0`, gt: `${DbPrefixes.Master}notes/spring/`,
lt: `${DbPrefixes.Master}notes0`, lt: `${DbPrefixes.Master}notes${inc('/')}`,
}, },
}, { }, {
Contents: [], Contents: [],
@ -315,21 +325,21 @@ const tests = [
Delimiter: '/', Delimiter: '/',
IsTruncated: true, IsTruncated: true,
NextMarker: 'notes/summer/', NextMarker: 'notes/summer/',
}), }, (e, input) => e.key > input.marker),
new Test('all parameters 3/5', { new Test('all parameters 3/3', {
delimiter: '/', delimiter: '/',
prefix: 'notes/', prefix: 'notes/', // prefix
marker: 'notes/summer/', marker: 'notes/summer/',
maxKeys: 1, maxKeys: 1,
}, { }, {
v0: { v0: {
gte: 'notes/summer0', gt: 'notes/summer/',
lt: 'notes0', lt: `notes${inc('/')}`,
}, },
v1: { v1: {
gte: `${DbPrefixes.Master}notes/summer0`, gt: `${DbPrefixes.Master}notes/summer/`,
lt: `${DbPrefixes.Master}notes0`, lt: `${DbPrefixes.Master}notes${inc('/')}`,
}, },
}, { }, {
Contents: [ Contents: [
@ -339,21 +349,21 @@ const tests = [
Delimiter: '/', Delimiter: '/',
IsTruncated: true, IsTruncated: true,
NextMarker: 'notes/year.txt', NextMarker: 'notes/year.txt',
}), }, (e, input) => e.key > input.marker),
new Test('all parameters 4/5', { new Test('all parameters 4/3', {
delimiter: '/', delimiter: '/',
prefix: 'notes/', prefix: 'notes/', // prefix
marker: 'notes/year.txt', marker: 'notes/year.txt',
maxKeys: 1, maxKeys: 1,
}, { }, {
v0: { v0: {
gt: 'notes/year.txt', gt: 'notes/year.txt',
lt: 'notes0', lt: `notes${inc('/')}`,
}, },
v1: { v1: {
gt: `${DbPrefixes.Master}notes/year.txt`, gt: `${DbPrefixes.Master}notes/year.txt`,
lt: `${DbPrefixes.Master}notes0`, lt: `${DbPrefixes.Master}notes${inc('/')}`,
}, },
}, { }, {
Contents: [ Contents: [
@ -363,9 +373,9 @@ const tests = [
Delimiter: '/', Delimiter: '/',
IsTruncated: true, IsTruncated: true,
NextMarker: 'notes/yore.rs', NextMarker: 'notes/yore.rs',
}), }, (e, input) => e.key > input.marker),
new Test('all parameters 5/5', { new Test('all parameters 5/3', {
delimiter: '/', delimiter: '/',
prefix: 'notes/', prefix: 'notes/',
marker: 'notes/yore.rs', marker: 'notes/yore.rs',
@ -373,11 +383,11 @@ const tests = [
}, { }, {
v0: { v0: {
gt: 'notes/yore.rs', gt: 'notes/yore.rs',
lt: 'notes0', lt: `notes${inc('/')}`,
}, },
v1: { v1: {
gt: `${DbPrefixes.Master}notes/yore.rs`, gt: `${DbPrefixes.Master}notes/yore.rs`,
lt: `${DbPrefixes.Master}notes0`, lt: `${DbPrefixes.Master}notes${inc('/')}`,
}, },
}, { }, {
Contents: [], Contents: [],
@ -385,29 +395,7 @@ const tests = [
Delimiter: '/', Delimiter: '/',
IsTruncated: false, IsTruncated: false,
NextMarker: undefined, NextMarker: undefined,
}), }, (e, input) => e.key > input.marker),
new Test('marker inside common prefix', {
delimiter: '/',
prefix: 'notes/',
marker: 'notes/spring/1.txt',
maxKeys: 1,
}, {
v0: {
gte: 'notes/spring0',
lt: 'notes0',
},
v1: {
gte: `${DbPrefixes.Master}notes/spring0`,
lt: `${DbPrefixes.Master}notes0`,
},
}, {
Contents: [],
CommonPrefixes: ['notes/summer/'],
Delimiter: '/',
IsTruncated: true,
NextMarker: 'notes/summer/',
}),
new Test('all elements v2', { new Test('all elements v2', {
v2: true, v2: true,
@ -447,7 +435,7 @@ const tests = [
Delimiter: undefined, Delimiter: undefined,
IsTruncated: false, IsTruncated: false,
NextContinuationToken: undefined, NextContinuationToken: undefined,
}), }, (e, input) => e.key > input.startAfter),
new Test('with bad startAfter', { new Test('with bad startAfter', {
startAfter: 'zzzz', startAfter: 'zzzz',
delimiter: '/', delimiter: '/',
@ -466,7 +454,7 @@ const tests = [
Delimiter: '/', Delimiter: '/',
IsTruncated: false, IsTruncated: false,
NextContinuationToken: undefined, NextContinuationToken: undefined,
}), }, (e, input) => e.key > input.startAfter),
new Test('with valid continuationToken', { new Test('with valid continuationToken', {
continuationToken: receivedData[4].key, continuationToken: receivedData[4].key,
v2: true, v2: true,
@ -490,7 +478,7 @@ const tests = [
Delimiter: undefined, Delimiter: undefined,
IsTruncated: false, IsTruncated: false,
NextContinuationToken: undefined, NextContinuationToken: undefined,
}), }, (e, input) => e.key > input.continuationToken),
new Test('with bad continuationToken', { new Test('with bad continuationToken', {
continuationToken: 'zzzz', continuationToken: 'zzzz',
delimiter: '/', delimiter: '/',
@ -509,49 +497,47 @@ const tests = [
Delimiter: '/', Delimiter: '/',
IsTruncated: false, IsTruncated: false,
NextContinuationToken: undefined, NextContinuationToken: undefined,
}), }, (e, input) => e.key > input.continuationToken),
new Test('bad startAfter and good prefix', { new Test('bad startAfter and good prefix', {
delimiter: '/', delimiter: '/',
prefix: 'notes/summer/', prefix: 'notes/summer/',
startAfter: 'notes/summer0', startAfter: 'notes/summer0',
v2: true,
}, { }, {
v0: { v0: {
gt: 'notes/summer0', gte: 'notes/summer/',
lt: 'notes/summer0', lt: `notes/summer${inc('/')}`,
}, },
v1: { v1: {
gt: `${DbPrefixes.Master}notes/summer0`, gte: `${DbPrefixes.Master}notes/summer/`,
lt: `${DbPrefixes.Master}notes/summer0`, lt: `${DbPrefixes.Master}notes/summer${inc('/')}`,
}, },
}, { }, {
Contents: [], Contents: [],
CommonPrefixes: [], CommonPrefixes: [],
Delimiter: '/', Delimiter: '/',
IsTruncated: false, IsTruncated: false,
NextContinuationToken: undefined, NextMarker: undefined,
}), }, (e, input) => e.key > input.startAfter),
new Test('bad continuation token and good prefix', { new Test('bad continuation token and good prefix', {
delimiter: '/', delimiter: '/',
prefix: 'notes/summer/', prefix: 'notes/summer/',
continuationToken: 'notes/summer0', continuationToken: 'notes/summer0',
v2: true,
}, { }, {
v0: { v0: {
gt: 'notes/summer0', gte: 'notes/summer/',
lt: 'notes/summer0', lt: `notes/summer${inc('/')}`,
}, },
v1: { v1: {
gt: `${DbPrefixes.Master}notes/summer0`, gte: `${DbPrefixes.Master}notes/summer/`,
lt: `${DbPrefixes.Master}notes/summer0`, lt: `${DbPrefixes.Master}notes/summer${inc('/')}`,
}, },
}, { }, {
Contents: [], Contents: [],
CommonPrefixes: [], CommonPrefixes: [],
Delimiter: '/', Delimiter: '/',
IsTruncated: false, IsTruncated: false,
NextContinuationToken: undefined, NextMarker: undefined,
}), }, (e, input) => e.key > input.continuationToken),
new Test('no delimiter v2', { new Test('no delimiter v2', {
startAfter: 'notes/year.txt', startAfter: 'notes/year.txt',
@ -573,9 +559,9 @@ const tests = [
Delimiter: undefined, Delimiter: undefined,
IsTruncated: true, IsTruncated: true,
NextContinuationToken: 'notes/yore.rs', NextContinuationToken: 'notes/yore.rs',
}), }, (e, input) => e.key > input.startAfter),
new Test('all parameters v2 1/5', { new Test('all parameters v2 1/6', {
delimiter: '/', delimiter: '/',
prefix: 'notes/', prefix: 'notes/',
startAfter: 'notes/', startAfter: 'notes/',
@ -584,11 +570,11 @@ const tests = [
}, { }, {
v0: { v0: {
gt: 'notes/', gt: 'notes/',
lt: 'notes0', lt: `notes${inc('/')}`,
}, },
v1: { v1: {
gt: `${DbPrefixes.Master}notes/`, gt: `${DbPrefixes.Master}notes/`,
lt: `${DbPrefixes.Master}notes0`, lt: `${DbPrefixes.Master}notes${inc('/')}`,
}, },
}, { }, {
Contents: [], Contents: [],
@ -596,9 +582,9 @@ const tests = [
Delimiter: '/', Delimiter: '/',
IsTruncated: true, IsTruncated: true,
NextContinuationToken: 'notes/spring/', NextContinuationToken: 'notes/spring/',
}), }, (e, input) => e.key > input.startAfter),
new Test('all parameters v2 2/5', { new Test('all parameters v2 2/6', {
delimiter: '/', delimiter: '/',
prefix: 'notes/', prefix: 'notes/',
continuationToken: 'notes/spring/', continuationToken: 'notes/spring/',
@ -606,12 +592,12 @@ const tests = [
v2: true, v2: true,
}, { }, {
v0: { v0: {
gte: 'notes/spring0', gt: 'notes/spring/',
lt: 'notes0', lt: `notes${inc('/')}`,
}, },
v1: { v1: {
gte: `${DbPrefixes.Master}notes/spring0`, gt: `${DbPrefixes.Master}notes/spring/`,
lt: `${DbPrefixes.Master}notes0`, lt: `${DbPrefixes.Master}notes${inc('/')}`,
}, },
}, { }, {
Contents: [], Contents: [],
@ -619,7 +605,7 @@ const tests = [
Delimiter: '/', Delimiter: '/',
IsTruncated: true, IsTruncated: true,
NextContinuationToken: 'notes/summer/', NextContinuationToken: 'notes/summer/',
}), }, (e, input) => e.key > input.continuationToken),
new Test('all parameters v2 3/5', { new Test('all parameters v2 3/5', {
delimiter: '/', delimiter: '/',
@ -629,12 +615,12 @@ const tests = [
v2: true, v2: true,
}, { }, {
v0: { v0: {
gte: 'notes/summer0', gt: 'notes/summer/',
lt: 'notes0', lt: `notes${inc('/')}`,
}, },
v1: { v1: {
gte: `${DbPrefixes.Master}notes/summer0`, gt: `${DbPrefixes.Master}notes/summer/`,
lt: `${DbPrefixes.Master}notes0`, lt: `${DbPrefixes.Master}notes${inc('/')}`,
}, },
}, { }, {
Contents: [ Contents: [
@ -644,7 +630,7 @@ const tests = [
Delimiter: '/', Delimiter: '/',
IsTruncated: true, IsTruncated: true,
NextContinuationToken: 'notes/year.txt', NextContinuationToken: 'notes/year.txt',
}), }, (e, input) => e.key > input.continuationToken),
new Test('all parameters v2 4/5', { new Test('all parameters v2 4/5', {
delimiter: '/', delimiter: '/',
@ -655,11 +641,11 @@ const tests = [
}, { }, {
v0: { v0: {
gt: 'notes/year.txt', gt: 'notes/year.txt',
lt: 'notes0', lt: `notes${inc('/')}`,
}, },
v1: { v1: {
gt: `${DbPrefixes.Master}notes/year.txt`, gt: `${DbPrefixes.Master}notes/year.txt`,
lt: `${DbPrefixes.Master}notes0`, lt: `${DbPrefixes.Master}notes${inc('/')}`,
}, },
}, { }, {
Contents: [ Contents: [
@ -669,7 +655,7 @@ const tests = [
Delimiter: '/', Delimiter: '/',
IsTruncated: true, IsTruncated: true,
NextContinuationToken: 'notes/yore.rs', NextContinuationToken: 'notes/yore.rs',
}), }, (e, input) => e.key > input.startAfter),
new Test('all parameters v2 5/5', { new Test('all parameters v2 5/5', {
delimiter: '/', delimiter: '/',
@ -680,11 +666,11 @@ const tests = [
}, { }, {
v0: { v0: {
gt: 'notes/yore.rs', gt: 'notes/yore.rs',
lt: 'notes0', lt: `notes${inc('/')}`,
}, },
v1: { v1: {
gt: `${DbPrefixes.Master}notes/yore.rs`, gt: `${DbPrefixes.Master}notes/yore.rs`,
lt: `${DbPrefixes.Master}notes0`, lt: `${DbPrefixes.Master}notes${inc('/')}`,
}, },
}, { }, {
Contents: [], Contents: [],
@ -692,11 +678,35 @@ const tests = [
Delimiter: '/', Delimiter: '/',
IsTruncated: false, IsTruncated: false,
NextContinuationToken: undefined, NextContinuationToken: undefined,
}), }, (e, input) => e.key > input.startAfter),
]; ];
function getTestListing(mdParams, data, vFormat) { const alphabeticalOrderTests = [
{
params: {},
expectedValue: true,
}, {
params: {
alphabeticalOrder: undefined,
},
expectedValue: true,
}, {
params: {
alphabeticalOrder: true,
},
expectedValue: true,
}, {
params: {
alphabeticalOrder: false,
},
expectedValue: false,
},
];
function getTestListing(test, data, vFormat) {
return data return data
.filter(e => test.filter(e, test.input))
.map(obj => { .map(obj => {
if (vFormat === 'v0') { if (vFormat === 'v0') {
return obj; return obj;
@ -708,12 +718,7 @@ function getTestListing(mdParams, data, vFormat) {
}; };
} }
return assert.fail(`bad format ${vFormat}`); return assert.fail(`bad format ${vFormat}`);
}) });
.filter(e =>
(!mdParams.gt || e.key > mdParams.gt) &&
(!mdParams.gte || e.key >= mdParams.gte) &&
(!mdParams.lt || e.key < mdParams.lt),
);
} }
['v0', 'v1'].forEach(vFormat => { ['v0', 'v1'].forEach(vFormat => {
@ -727,7 +732,16 @@ function getTestListing(mdParams, data, vFormat) {
}); });
} }
assert.strictEqual(delimiter.skipping(), assert.strictEqual(delimiter.skipping(),
`${vFormat === 'v1' ? DbPrefixes.Master : ''}foo0`); `${vFormat === 'v1' ? DbPrefixes.Master : ''}foo/`);
});
it('Should set Delimiter alphabeticalOrder field to the expected value', () => {
alphabeticalOrderTests.forEach(test => {
const delimiter = new Delimiter(test.params);
assert.strictEqual(delimiter.alphabeticalOrder,
test.expectedValue,
`${JSON.stringify(test.params)}`);
});
}); });
tests.forEach(test => { tests.forEach(test => {
@ -737,13 +751,9 @@ function getTestListing(mdParams, data, vFormat) {
assert.deepStrictEqual(params, test.genMDParams[vFormat]); assert.deepStrictEqual(params, test.genMDParams[vFormat]);
}); });
it(`Should list ${test.name}`, () => { it(`Should list ${test.name}`, () => {
const listing = new Delimiter(test.input, logger, vFormat); // Simulate skip scan done by LevelDB
const mdParams = listing.genMDParams(); const d = getTestListing(test, data, vFormat);
const rawEntries = getTestListing(mdParams, data, vFormat); const res = performListing(d, Delimiter, test.input, logger, vFormat);
for (const entry of rawEntries) {
listing.filter(entry);
}
const res = listing.result();
assert.deepStrictEqual(res, test.output); assert.deepStrictEqual(res, test.output);
}); });
}); });
@ -752,16 +762,49 @@ function getTestListing(mdParams, data, vFormat) {
if (vFormat === 'v0') { if (vFormat === 'v0') {
tests.forEach(test => { tests.forEach(test => {
it(`Should list master versions ${test.name}`, () => { it(`Should list master versions ${test.name}`, () => {
const listing = new DelimiterMaster(test.input, logger, vFormat); // Simulate skip scan done by LevelDB
const mdParams = listing.genMDParams(); const d = dataVersioned.filter(e => test.filter(e, test.input));
const rawEntries = getTestListing(mdParams, dataVersioned, vFormat); const res = performListing(d, DelimiterMaster, test.input, logger, vFormat);
for (const entry of rawEntries) {
listing.filter(entry);
}
const res = listing.result();
assert.deepStrictEqual(res, test.output); assert.deepStrictEqual(res, test.output);
}); });
}); });
} }
it('Should filter values according to alphabeticalOrder parameter', () => {
let test = new Test('alphabeticalOrder parameter set', {
delimiter: '/',
alphabeticalOrder: true,
}, {
}, {
Contents: [
receivedNonAlphaData[0],
],
Delimiter: '/',
CommonPrefixes: [],
IsTruncated: false,
NextMarker: undefined,
});
let d = getTestListing(test, nonAlphabeticalData, vFormat);
let res = performListing(d, Delimiter, test.input, logger, vFormat);
assert.deepStrictEqual(res, test.output);
test = new Test('alphabeticalOrder parameter set', {
delimiter: '/',
alphabeticalOrder: false,
}, {
}, {
Contents: [
receivedNonAlphaData[0],
receivedNonAlphaData[1],
],
Delimiter: '/',
CommonPrefixes: [],
IsTruncated: false,
NextMarker: undefined,
});
d = getTestListing(test, nonAlphabeticalData, vFormat);
res = performListing(d, Delimiter, test.input, logger, vFormat);
assert.deepStrictEqual(res, test.output);
});
}); });
}); });

View File

@ -6,12 +6,19 @@ const DelimiterCurrent =
require('../../../../lib/algos/list/delimiterCurrent').DelimiterCurrent; require('../../../../lib/algos/list/delimiterCurrent').DelimiterCurrent;
const { const {
FILTER_ACCEPT, FILTER_ACCEPT,
FILTER_SKIP,
FILTER_END, FILTER_END,
} = require('../../../../lib/algos/list/tools'); } = require('../../../../lib/algos/list/tools');
const VSConst = const VSConst =
require('../../../../lib/versioning/constants').VersioningConstants; require('../../../../lib/versioning/constants').VersioningConstants;
const { DbPrefixes } = VSConst; const { DbPrefixes } = VSConst;
const VID_SEP = VSConst.VersionId.Separator;
const EmptyResult = {
Contents: [],
IsTruncated: false,
};
const fakeLogger = { const fakeLogger = {
trace: () => {}, trace: () => {},
debug: () => {}, debug: () => {},
@ -21,410 +28,101 @@ const fakeLogger = {
fatal: () => {}, fatal: () => {},
}; };
function getListingKey(key, vFormat) { function makeV1Key(key) {
if (vFormat === 'v0') { const keyPrefix = key.includes(VID_SEP) ?
return key; DbPrefixes.Version : DbPrefixes.Master;
} return `${keyPrefix}${key}`;
if (vFormat === 'v1') {
return `${DbPrefixes.Master}${key}`;
}
return assert.fail(`bad vFormat ${vFormat}`);
} }
['v0', 'v1'].forEach(v => { describe('DelimiterCurrent', () => {
describe(`DelimiterCurrent with ${v} bucket format`, () => { it('should accept entry starting with prefix', () => {
it('should return expected metadata parameters', () => { const delimiter = new DelimiterCurrent({ prefix: 'prefix' }, fakeLogger, 'v1');
const prefix = 'pre';
const marker = 'premark';
const beforeDate = '1970-01-01T00:00:00.005Z';
const excludedDataStoreName = 'location1';
const maxScannedLifecycleListingEntries = 2;
const delimiter = new DelimiterCurrent({
prefix,
marker,
beforeDate,
excludedDataStoreName,
maxScannedLifecycleListingEntries,
}, fakeLogger, v);
const expectedParams = { const masterKey = 'prefix1';
dataStoreName: { const date1 = '1970-01-01T00:00:00.001Z';
ne: excludedDataStoreName, const value1 = `{"last-modified": "${date1}"}`;
assert.strictEqual(delimiter.filter({ key: makeV1Key(masterKey), value: value1 }), FILTER_ACCEPT);
const expectedResult = {
Contents: [
{
key: masterKey,
value: value1,
}, },
lastModified: { ],
lt: beforeDate, IsTruncated: false,
};
assert.deepStrictEqual(delimiter.result(), expectedResult);
});
it('should skip entry not starting with prefix', () => {
const delimiter = new DelimiterCurrent({ prefix: 'prefix' }, fakeLogger, 'v1');
const listingKey = makeV1Key('noprefix');
assert.strictEqual(delimiter.filter({ key: listingKey, value: '' }), FILTER_SKIP);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});
it('should accept a master and return it', () => {
const delimiter = new DelimiterCurrent({ }, fakeLogger, 'v1');
const masterKey = 'key';
const date1 = '1970-01-01T00:00:00.001Z';
const value1 = `{"last-modified": "${date1}"}`;
assert.strictEqual(delimiter.filter({
key: makeV1Key(masterKey),
value: value1,
}), FILTER_ACCEPT);
const expectedResult = {
Contents: [
{
key: masterKey,
value: value1,
}, },
gt: getListingKey('premark', v), ],
lt: getListingKey('prf', v), IsTruncated: false,
}; };
assert.deepStrictEqual(delimiter.genMDParams(), expectedParams);
assert.strictEqual(delimiter.maxScannedLifecycleListingEntries, 2); assert.deepStrictEqual(delimiter.result(), expectedResult);
}); });
it('should accept entry starting with prefix', () => { it('should accept the first master and return the truncated content', () => {
const delimiter = new DelimiterCurrent({ prefix: 'prefix' }, fakeLogger, v); const delimiter = new DelimiterCurrent({ maxKeys: 1 }, fakeLogger, 'v1');
const masterKey = 'prefix1'; const masterKey1 = 'key1';
const date1 = '1970-01-01T00:00:00.001Z'; const date1 = '1970-01-01T00:00:00.001Z';
const value1 = `{"last-modified": "${date1}"}`; const value1 = `{"last-modified": "${date1}"}`;
assert.strictEqual(delimiter.filter({ key: getListingKey(masterKey, v), value: value1 }), FILTER_ACCEPT);
assert.strictEqual(delimiter.filter({
const expectedResult = { key: makeV1Key(masterKey1),
Contents: [ value: value1,
{ }), FILTER_ACCEPT);
key: masterKey,
value: value1, const masterKey2 = 'key2';
}, const date2 = '1970-01-01T00:00:00.000Z';
], const value2 = `{"last-modified": "${date2}"}`;
IsTruncated: false,
}; assert.strictEqual(delimiter.filter({
key: makeV1Key(masterKey2),
assert.deepStrictEqual(delimiter.result(), expectedResult); value: value2,
}); }), FILTER_END);
it('should accept a master and return it', () => { const expectedResult = {
const delimiter = new DelimiterCurrent({ }, fakeLogger, v); Contents: [
{
const masterKey = 'key'; key: masterKey1,
value: value1,
const date1 = '1970-01-01T00:00:00.001Z'; },
const value1 = `{"last-modified": "${date1}"}`; ],
NextMarker: masterKey1,
assert.strictEqual(delimiter.filter({ IsTruncated: true,
key: getListingKey(masterKey, v), };
value: value1,
}), FILTER_ACCEPT); assert.deepStrictEqual(delimiter.result(), expectedResult);
const expectedResult = {
Contents: [
{
key: masterKey,
value: value1,
},
],
IsTruncated: false,
};
assert.deepStrictEqual(delimiter.result(), expectedResult);
});
it('should accept the first master and return the truncated content', () => {
const delimiter = new DelimiterCurrent({ maxKeys: 1 }, fakeLogger, v);
const masterKey1 = 'key1';
const date1 = '1970-01-01T00:00:00.001Z';
const value1 = `{"last-modified": "${date1}"}`;
assert.strictEqual(delimiter.filter({
key: getListingKey(masterKey1, v),
value: value1,
}), FILTER_ACCEPT);
const masterKey2 = 'key2';
const date2 = '1970-01-01T00:00:00.000Z';
const value2 = `{"last-modified": "${date2}"}`;
assert.strictEqual(delimiter.filter({
key: getListingKey(masterKey2, v),
value: value2,
}), FILTER_END);
const expectedResult = {
Contents: [
{
key: masterKey1,
value: value1,
},
],
NextMarker: masterKey1,
IsTruncated: true,
};
assert.deepStrictEqual(delimiter.result(), expectedResult);
});
it('should return the object created before beforeDate', () => {
const beforeDate = '1970-01-01T00:00:00.003Z';
const delimiter = new DelimiterCurrent({ beforeDate }, fakeLogger, v);
const masterKey1 = 'key1';
const date1 = '1970-01-01T00:00:00.004Z';
const value1 = `{"last-modified": "${date1}"}`;
assert.strictEqual(delimiter.filter({
key: getListingKey(masterKey1, v),
value: value1,
}), FILTER_ACCEPT);
const masterKey2 = 'key2';
const date2 = '1970-01-01T00:00:00.000Z';
const value2 = `{"last-modified": "${date2}"}`;
assert.strictEqual(delimiter.filter({
key: getListingKey(masterKey2, v),
value: value2,
}), FILTER_ACCEPT);
const expectedResult = {
Contents: [
{
key: masterKey2,
value: value2,
},
],
IsTruncated: false,
};
assert.deepStrictEqual(delimiter.result(), expectedResult);
});
it('should return an empty list if last-modified is an empty string', () => {
const beforeDate = '1970-01-01T00:00:00.003Z';
const delimiter = new DelimiterCurrent({ beforeDate }, fakeLogger, v);
const masterKey0 = 'key0';
const value0 = '{"last-modified": ""}';
assert.strictEqual(delimiter.filter({
key: getListingKey(masterKey0, v),
value: value0,
}), FILTER_ACCEPT);
const expectedResult = {
Contents: [],
IsTruncated: false,
};
assert.deepStrictEqual(delimiter.result(), expectedResult);
});
it('should return an empty list if last-modified is undefined', () => {
const beforeDate = '1970-01-01T00:00:00.003Z';
const delimiter = new DelimiterCurrent({ beforeDate }, fakeLogger, v);
const masterKey0 = 'key0';
const value0 = '{}';
assert.strictEqual(delimiter.filter({
key: getListingKey(masterKey0, v),
value: value0,
}), FILTER_ACCEPT);
const expectedResult = {
Contents: [],
IsTruncated: false,
};
assert.deepStrictEqual(delimiter.result(), expectedResult);
});
it('should return the object with dataStore name that does not match', () => {
const beforeDate = '1970-01-01T00:00:00.005Z';
const excludedDataStoreName = 'location-excluded';
const delimiter = new DelimiterCurrent({ beforeDate, excludedDataStoreName }, fakeLogger, v);
const masterKey1 = 'key1';
const date1 = '1970-01-01T00:00:00.004Z';
const value1 = `{"last-modified": "${date1}", "dataStoreName": "valid"}`;
assert.strictEqual(delimiter.filter({
key: getListingKey(masterKey1, v),
value: value1,
}), FILTER_ACCEPT);
const masterKey2 = 'key2';
const date2 = '1970-01-01T00:00:00.000Z';
const value2 = `{"last-modified": "${date2}", "dataStoreName": "${excludedDataStoreName}"}`;
assert.strictEqual(delimiter.filter({
key: getListingKey(masterKey2, v),
value: value2,
}), FILTER_ACCEPT);
const expectedResult = {
Contents: [
{
key: masterKey1,
value: value1,
},
],
IsTruncated: false,
};
assert.deepStrictEqual(delimiter.result(), expectedResult);
});
it('should stop fetching entries if the max keys are reached and return the accurate next marker', () => {
const beforeDate = '1970-01-01T00:00:00.005Z';
const excludedDataStoreName = 'location-excluded';
const delimiter = new DelimiterCurrent({ beforeDate, excludedDataStoreName, maxKeys: 1 }, fakeLogger, v);
const masterKey1 = 'key1';
const date1 = '1970-01-01T00:00:00.004Z';
const value1 = `{"last-modified": "${date1}", "dataStoreName": "valid"}`;
assert.strictEqual(delimiter.filter({
key: getListingKey(masterKey1, v),
value: value1,
}), FILTER_ACCEPT);
const masterKey2 = 'key2';
const date2 = '1970-01-01T00:00:00.000Z';
const value2 = `{"last-modified": "${date2}", "dataStoreName": "${excludedDataStoreName}"}`;
assert.strictEqual(delimiter.filter({
key: getListingKey(masterKey2, v),
value: value2,
}), FILTER_END);
const expectedResult = {
Contents: [
{
key: masterKey1,
value: value1,
},
],
IsTruncated: true,
NextMarker: masterKey1,
};
assert.deepStrictEqual(delimiter.result(), expectedResult);
});
it('should return the object created before beforeDate and with dataStore name that does not match', () => {
const beforeDate = '1970-01-01T00:00:00.003Z';
const excludedDataStoreName = 'location-excluded';
const delimiter = new DelimiterCurrent({ beforeDate, excludedDataStoreName }, fakeLogger, v);
const masterKey1 = 'key1';
const date1 = '1970-01-01T00:00:00.004Z';
const value1 = `{"last-modified": "${date1}", "dataStoreName": "valid"}`;
assert.strictEqual(delimiter.filter({
key: getListingKey(masterKey1, v),
value: value1,
}), FILTER_ACCEPT);
const masterKey2 = 'key2';
const date2 = '1970-01-01T00:00:00.001Z';
const value2 = `{"last-modified": "${date2}", "dataStoreName": "valid"}`;
assert.strictEqual(delimiter.filter({
key: getListingKey(masterKey2, v),
value: value2,
}), FILTER_ACCEPT);
const masterKey3 = 'key3';
const date3 = '1970-01-01T00:00:00.000Z';
const value3 = `{"last-modified": "${date3}", "dataStoreName": "${excludedDataStoreName}"}`;
assert.strictEqual(delimiter.filter({
key: getListingKey(masterKey3, v),
value: value3,
}), FILTER_ACCEPT);
const expectedResult = {
Contents: [
{
key: masterKey2,
value: value2,
},
],
IsTruncated: false,
};
assert.deepStrictEqual(delimiter.result(), expectedResult);
});
it('should return the objects pushed before max scanned entries value is reached', () => {
const beforeDate = '1970-01-01T00:00:00.003Z';
const maxScannedLifecycleListingEntries = 2;
const delimiter = new DelimiterCurrent({ beforeDate, maxScannedLifecycleListingEntries }, fakeLogger, v);
const masterKey1 = 'key1';
const date1 = '1970-01-01T00:00:00.000Z';
const value1 = `{"last-modified": "${date1}"}`;
assert.strictEqual(delimiter.filter({
key: getListingKey(masterKey1, v),
value: value1,
}), FILTER_ACCEPT);
const masterKey2 = 'key2';
const date2 = '1970-01-01T00:00:00.001Z';
const value2 = `{"last-modified": "${date2}"}`;
assert.strictEqual(delimiter.filter({
key: getListingKey(masterKey2, v),
value: value2,
}), FILTER_ACCEPT);
const masterKey3 = 'key3';
const date3 = '1970-01-01T00:00:00.002Z';
const value3 = `{"last-modified": "${date3}"}`;
assert.strictEqual(delimiter.filter({
key: getListingKey(masterKey3, v),
value: value3,
}), FILTER_END);
const expectedResult = {
Contents: [
{
key: masterKey1,
value: value1,
},
{
key: masterKey2,
value: value2,
},
],
NextMarker: masterKey2,
IsTruncated: true,
};
assert.deepStrictEqual(delimiter.result(), expectedResult);
});
it('should return empty content after max scanned entries value is reached', () => {
const beforeDate = '1970-01-01T00:00:00.003Z';
const maxScannedLifecycleListingEntries = 2;
const delimiter = new DelimiterCurrent({ beforeDate, maxScannedLifecycleListingEntries }, fakeLogger, v);
const masterKey1 = 'key1';
const date1 = '1970-01-01T00:00:00.004Z';
const value1 = `{"last-modified": "${date1}"}`;
assert.strictEqual(delimiter.filter({
key: getListingKey(masterKey1, v),
value: value1,
}), FILTER_ACCEPT);
const masterKey2 = 'key2';
const date2 = '1970-01-01T00:00:00.005Z';
const value2 = `{"last-modified": "${date2}"}`;
assert.strictEqual(delimiter.filter({
key: getListingKey(masterKey2, v),
value: value2,
}), FILTER_ACCEPT);
const masterKey3 = 'key3';
const date3 = '1970-01-01T00:00:00.006Z';
const value3 = `{"last-modified": "${date3}"}`;
assert.strictEqual(delimiter.filter({
key: getListingKey(masterKey3, v),
value: value3,
}), FILTER_END);
const expectedResult = {
Contents: [],
NextMarker: masterKey2,
IsTruncated: true,
};
assert.deepStrictEqual(delimiter.result(), expectedResult);
});
}); });
}); });

View File

@ -0,0 +1,493 @@
'use strict'; // eslint-disable-line strict
const assert = require('assert');
const DelimiterMaster =
require('../../../../lib/algos/list/delimiterMaster').DelimiterMaster;
const {
FILTER_ACCEPT,
FILTER_SKIP,
SKIP_NONE,
inc,
} = require('../../../../lib/algos/list/tools');
const VSConst =
require('../../../../lib/versioning/constants').VersioningConstants;
const Version = require('../../../../lib/versioning/Version').Version;
const { generateVersionId } = require('../../../../lib/versioning/VersionID');
const { DbPrefixes } = VSConst;
const zpad = require('../../helpers').zpad;
const VID_SEP = VSConst.VersionId.Separator;
const EmptyResult = {
CommonPrefixes: [],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
};
const fakeLogger = {
trace: () => {},
debug: () => {},
info: () => {},
warn: () => {},
error: () => {},
fatal: () => {},
};
function getListingKey(key, vFormat) {
if (vFormat === 'v0') {
return key;
}
if (vFormat === 'v1') {
return `${DbPrefixes.Master}${key}`;
}
return assert.fail(`bad vFormat ${vFormat}`);
}
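// For reference: in v0 format, master keys are stored bare ('key') and
// version keys as `${key}${VID_SEP}${versionId}`; in v1 format, master
// keys carry the DbPrefixes.Master prefix (see getListingKey above).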
['v0', 'v1'].forEach(vFormat => {
describe(`Delimiter All masters listing algorithm vFormat=${vFormat}`, () => {
it('should return SKIP_NONE for DelimiterMaster when both NextMarker ' +
'and NextContinuationToken are undefined', () => {
const delimiter = new DelimiterMaster({ delimiter: '/' }, fakeLogger, vFormat);
assert.strictEqual(delimiter.NextMarker, undefined);
// When there is no NextMarker or NextContinuationToken, it should
// return SKIP_NONE
assert.strictEqual(delimiter.skipping(), SKIP_NONE);
});
it('should return <key><VersionIdSeparator> for DelimiterMaster when ' +
'NextMarker is set and there is a delimiter', () => {
const key = 'key';
const delimiter = new DelimiterMaster({ delimiter: '/', marker: key },
fakeLogger, vFormat);
/* Filter a master version to set NextMarker. */
const listingKey = getListingKey(key, vFormat);
delimiter.filter({ key: listingKey, value: '' });
assert.strictEqual(delimiter.NextMarker, key);
/* With a delimiter, skipping should return the previous key + VID_SEP
 * (except when the NextMarker ends with the delimiter, a case covered
 * by a later test). */
assert.strictEqual(delimiter.skipping(), listingKey + VID_SEP);
});
it('should return <key><VersionIdSeparator> for DelimiterMaster when ' +
'NextContinuationToken is set and there is a delimiter', () => {
const key = 'key';
const delimiter = new DelimiterMaster(
{ delimiter: '/', startAfter: key, v2: true },
fakeLogger, vFormat);
// Filter a master version to set NextContinuationToken
const listingKey = getListingKey(key, vFormat);
delimiter.filter({ key: listingKey, value: '' });
assert.strictEqual(delimiter.NextContinuationToken, key);
assert.strictEqual(delimiter.skipping(), listingKey + VID_SEP);
});
it('should return NextMarker for DelimiterMaster when NextMarker is set' +
', there is a delimiter and the key ends with the delimiter', () => {
const delimiterChar = '/';
const keyWithEndingDelimiter = `key${delimiterChar}`;
const delimiter = new DelimiterMaster({
delimiter: delimiterChar,
marker: keyWithEndingDelimiter,
}, fakeLogger, vFormat);
/* When a delimiter is set and the NextMarker ends with the
* delimiter it should return the next marker value. */
assert.strictEqual(delimiter.NextMarker, keyWithEndingDelimiter);
const skipKey = vFormat === 'v1' ?
`${DbPrefixes.Master}${keyWithEndingDelimiter}` :
keyWithEndingDelimiter;
assert.strictEqual(delimiter.skipping(), skipKey);
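// When the marker ends with the delimiter, the listing can skip the
// whole prefix range at once, hence skipping() returns the prefix
// itself rather than key + VID_SEP.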
});
it('should skip entries not starting with prefix', () => {
const delimiter = new DelimiterMaster({ prefix: 'prefix' }, fakeLogger, vFormat);
const listingKey = getListingKey('wrong', vFormat);
assert.strictEqual(delimiter.filter({ key: listingKey }), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, undefined);
assert.strictEqual(delimiter.prvKey, undefined);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});
it('should skip entries that sort before the marker', () => {
const delimiter = new DelimiterMaster({ marker: 'b' }, fakeLogger, vFormat);
const listingKey = getListingKey('a', vFormat);
assert.strictEqual(delimiter.filter({ key: listingKey }), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, 'b');
assert.strictEqual(delimiter.prvKey, undefined);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});
it('should accept a master version', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const key = 'key';
const value = '';
const listingKey = getListingKey(key, vFormat);
assert.strictEqual(delimiter.filter({ key: listingKey, value }), FILTER_ACCEPT);
if (vFormat === 'v0') {
assert.strictEqual(delimiter.prvKey, key);
}
assert.strictEqual(delimiter.NextMarker, key);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key, value }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});
it('should return correct listing results for entries with different common prefixes', () => {
const delimiterChar = '/';
const commonPrefix1 = `commonPrefix1${delimiterChar}`;
const commonPrefix2 = `commonPrefix2${delimiterChar}`;
const prefix1Key1 = `${commonPrefix1}key1`;
const prefix1Key2 = `${commonPrefix1}key2`;
const prefix2Key1 = `${commonPrefix2}key1`;
const value = 'value';
const delimiter = new DelimiterMaster({ delimiter: delimiterChar },
fakeLogger, vFormat);
/* Filter the first entry with a common prefix. It should be
* accepted and added to the result. */
assert.strictEqual(delimiter.filter({
key: getListingKey(prefix1Key1, vFormat),
value,
}),
FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});
/* Filter the second entry, which has the same common prefix as the
 * first entry. It should be skipped and not added to the result. */
assert.strictEqual(delimiter.filter({
key: getListingKey(prefix1Key2, vFormat),
value,
}),
FILTER_SKIP);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});
/* Filter an entry with a new common prefix. It should be accepted,
 * its prefix added to CommonPrefixes, but nothing added to Contents. */
assert.strictEqual(delimiter.filter({
key: getListingKey(prefix2Key1, vFormat),
value,
}),
FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1, commonPrefix2],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});
});
if (vFormat === 'v0') {
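// The following tests exercise v0-only behaviors: PHD (placeholder)
// master entries, delete markers, version keys interleaved with master
// keys, and replay keys.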
it('should accept a PHD version as first input', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const keyPHD = 'keyPHD';
const objPHD = {
key: keyPHD,
value: Version.generatePHDVersion(generateVersionId('', '')),
};
/* When filtered, it should return FILTER_ACCEPT and set the prvKey
* to undefined. It should not be added to the result content or common
* prefixes. */
assert.strictEqual(delimiter.filter(objPHD), FILTER_ACCEPT);
assert.strictEqual(delimiter.prvKey, undefined);
assert.strictEqual(delimiter.NextMarker, undefined);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});
it('should accept a PHD version', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const key = 'keyA';
const value = '';
const keyPHD = 'keyBPHD';
const objPHD = {
key: keyPHD,
value: Version.generatePHDVersion(generateVersionId('', '')),
};
/* Filter a master version to set the NextMarker and the prvKey, and to
 * add an element to the result content. */
delimiter.filter({ key, value });
/* When filtered, it should return FILTER_ACCEPT and set the prvKey
* to undefined. It should not be added to the result content or common
* prefixes. */
assert.strictEqual(delimiter.filter(objPHD), FILTER_ACCEPT);
assert.strictEqual(delimiter.prvKey, undefined);
assert.strictEqual(delimiter.NextMarker, key);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key, value }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});
it('should accept a version after a PHD', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const masterKey = 'key';
const keyVersion = `${masterKey}${VID_SEP}version`;
const value = '';
const objPHD = {
key: masterKey,
value: Version.generatePHDVersion(generateVersionId('', '')),
};
/* Filter the PHD object. */
delimiter.filter(objPHD);
/* Filtering the PHD object has no impact: the version is
 * accepted and added to the result. */
assert.strictEqual(delimiter.filter({
key: keyVersion,
value,
}), FILTER_ACCEPT);
assert.strictEqual(delimiter.prvKey, masterKey);
assert.strictEqual(delimiter.NextMarker, masterKey);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key: masterKey, value }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});
it('should skip a delete marker version', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const version = new Version({ isDeleteMarker: true });
const key = 'key';
const obj = {
key: `${key}${VID_SEP}version`,
value: version.toString(),
};
/* When filtered, it should return FILTER_SKIP and set the prvKey. It
* should not be added to the result content or common prefixes. */
assert.strictEqual(delimiter.filter(obj), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, undefined);
assert.strictEqual(delimiter.prvKey, key);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});
it('should skip version after a delete marker master', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const version = new Version({ isDeleteMarker: true });
const key = 'key';
const versionKey = `${key}${VID_SEP}version`;
delimiter.filter({ key, value: version.toString() });
assert.strictEqual(delimiter.filter({
key: versionKey,
value: 'value',
}), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, undefined);
assert.strictEqual(delimiter.prvKey, key);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});
it('should accept a new master key after a delete marker master', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const version = new Version({ isDeleteMarker: true });
const key1 = 'key1';
const key2 = 'key2';
const value = 'value';
delimiter.filter({ key: key1, value: version.toString() });
assert.strictEqual(delimiter.filter({
key: key2,
value: 'value',
}), FILTER_ACCEPT);
assert.strictEqual(delimiter.NextMarker, key2);
assert.strictEqual(delimiter.prvKey, key2);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key: key2, value }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});
it('should accept the master version and skip the other ones', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const masterKey = 'key';
const masterValue = 'value';
const versionKey = `${masterKey}${VID_SEP}version`;
const versionValue = 'versionvalue';
/* Filter the master version. */
delimiter.filter({ key: masterKey, value: masterValue });
/* The version is skipped and not added to the result. The delimiter's
 * NextMarker and prvKey values are unmodified and remain set to the
 * masterKey. */
assert.strictEqual(delimiter.filter({
key: versionKey,
value: versionValue,
}), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, masterKey);
assert.strictEqual(delimiter.prvKey, masterKey);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key: masterKey, value: masterValue }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});
it('should return the correct listing result for versioned keys', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const masterKey = 'key';
const versionKey1 = `${masterKey}${VID_SEP}version1`;
const versionKey2 = `${masterKey}${VID_SEP}version2`;
const value2 = 'value2';
/* Filter the PHD version. */
assert.strictEqual(delimiter.filter({
key: masterKey,
value: '{ "isPHD": true, "value": "version" }',
}), FILTER_ACCEPT);
/* Filter a delete marker version. */
assert.strictEqual(delimiter.filter({
key: versionKey1,
value: '{ "isDeleteMarker": true }',
}), FILTER_ACCEPT);
/* Filter a last version with a specific value. */
assert.strictEqual(delimiter.filter({
key: versionKey2,
value: value2,
}), FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key: masterKey, value: value2 }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});
/* These tests cover the internal management of the prvKey field of the
 * DelimiterMaster class, in particular when it has been set to an
 * entry's key and that entry is then skipped because its common prefix
 * is already present in the result. */
it('should accept a version after skipping an object because of its commonPrefix', () => {
const delimiterChar = '/';
const commonPrefix1 = `commonPrefix1${delimiterChar}`;
const commonPrefix2 = `commonPrefix2${delimiterChar}`;
const prefix1Key1 = `${commonPrefix1}key1`;
const prefix1Key2 = `${commonPrefix1}key2`;
const prefix2VersionKey1 = `${commonPrefix2}key1${VID_SEP}version`;
const value = 'value';
const delimiter = new DelimiterMaster({ delimiter: delimiterChar },
fakeLogger, vFormat);
/* Filter the first two entries, which share a common prefix, to add
 * that prefix to the result and reach the state where an entry is
 * skipped because its common prefix is already present in the result. */
delimiter.filter({ key: prefix1Key1, value });
delimiter.filter({ key: prefix1Key2, value });
/* Filter an object with a key containing a version part and a new
* common prefix. It should be accepted and the new common prefix
* added to the result. */
assert.strictEqual(delimiter.filter({
key: prefix2VersionKey1,
value,
}), FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1, commonPrefix2],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});
});
it('should skip a versioned entry when there is a delimiter and the key ' +
'starts with the NextMarker value', () => {
const delimiterChar = '/';
const commonPrefix = `commonPrefix${delimiterChar}`;
const key = `${commonPrefix}key${VID_SEP}version`;
const value = 'value';
const delimiter = new DelimiterMaster({ delimiter: delimiterChar },
fakeLogger, vFormat);
/* TODO: should be set to a whole key instead of just a common prefix
* once ZENKO-1048 is fixed. */
delimiter.NextMarker = commonPrefix;
assert.strictEqual(delimiter.filter({ key, value }), FILTER_SKIP);
});
it('should return the correct skipping value for DelimiterMaster on replay keys', () => {
const delimiter = new DelimiterMaster(
{ delimiter: '/', v2: true },
fakeLogger, vFormat);
for (let i = 0; i < 10; i++) {
delimiter.filter({
key: `foo/${zpad(i)}`,
value: '{}',
});
}
// simulate a listing that goes through a replay key, ...
assert.strictEqual(
delimiter.filter({
key: `${DbPrefixes.Replay}xyz`,
value: 'abcdef',
}),
FILTER_SKIP);
// ...it should skip the whole replay prefix
assert.strictEqual(delimiter.skipping(), DbPrefixes.Replay);
// simulate a listing that reaches regular object keys
// beyond the replay prefix, ...
assert.strictEqual(
delimiter.filter({
key: `${inc(DbPrefixes.Replay)}foo/bar`,
value: '{}',
}),
FILTER_ACCEPT);
// ...it should return to skipping by prefix as usual
assert.strictEqual(delimiter.skipping(), `${inc(DbPrefixes.Replay)}foo/`);
});
}
});
});

File diff suppressed because it is too large

Some files were not shown because too many files have changed in this diff