Compare commits
388 Commits
task/arsen
...
development
Author | SHA1 | Date |
---|---|---|
Vitaliy Filippov | 19855115ae | |
Vitaliy Filippov | 329d8ef32c | |
Vitaliy Filippov | f0ded4ea4f | |
Vitaliy Filippov | 3eea263384 | |
Vitaliy Filippov | c26d4f7d70 | |
Vitaliy Filippov | 63137e7a7b | |
Vitaliy Filippov | fdb23b1cd2 | |
Vitaliy Filippov | 4120eac127 | |
Maha Benzekri | d9bbd6cf3e | |
Maha Benzekri | 65e89d286d | |
Maha Benzekri | dcbc5ca98f | |
Maha Benzekri | 817bb836ec | |
Maha Benzekri | e3e4b2aea7 | |
Francois Ferrand | 9cd72221e8 | |
Francois Ferrand | bdcd4685ad | |
Francois Ferrand | b2b6c47ba7 | |
Jonathan Gramain | da173d53b4 | |
Jonathan Gramain | 7eb2701f21 | |
Jonathan Gramain | 6ec3c8e10d | |
Jonathan Gramain | 7aaf277db2 | |
Francois Ferrand | 67421f8c76 | |
Francois Ferrand | bf2260b1ae | |
Francois Ferrand | 11e0e1b489 | |
Anurag Mittal | f13ec2cf4c | |
Anurag Mittal | e369c7e6d2 | |
Anurag Mittal | c5c1db4568 | |
Anurag Mittal | 58f4d3cb3a | |
Anurag Mittal | b049f39e2a | |
williamlardier | 30eaaf15eb | |
williamlardier | 9d16fb0a34 | |
williamlardier | cdc612f379 | |
williamlardier | 61dd65b2c4 | |
bert-e | 2c0696322e | |
Maha Benzekri | c464a70b90 | |
Maha Benzekri | af07bb3df4 | |
Maha Benzekri | 1858654f34 | |
Maha Benzekri | 0475c8520a | |
Maha Benzekri | 31a4de5372 | |
Maha Benzekri | 0c53d13439 | |
Maha Benzekri | cad8b14df1 | |
Nicolas Humbert | fe29bacc79 | |
Nicolas Humbert | a86cff4631 | |
Kerkesni | f13a5d79ea | |
Maha Benzekri | ca8f570f15 | |
Maha Benzekri | a4bca10faf | |
Jonathan Gramain | c2ab4a2052 | |
Jonathan Gramain | fd0aa314eb | |
Jonathan Gramain | a643a3e6cc | |
Jonathan Gramain | e9d815cc9d | |
Jonathan Gramain | c86d24fc8f | |
Jonathan Gramain | 3b6d3838f5 | |
Jonathan Gramain | fcdfa889be | |
Mickael Bourgois | 5b8fcf0313 | |
Mickael Bourgois | bdfde26fe4 | |
Mickael Bourgois | e53613783a | |
Mickael Bourgois | 69dbbb143a | |
Mickael Bourgois | 403c4e5040 | |
Nicolas Humbert | a1dc2bd84d | |
Nicolas Humbert | 01409d690c | |
Nicolas Humbert | 9ee40f343b | |
bert-e | 77ed018b4f | |
bert-e | f77700236f | |
Nicolas Humbert | 43ff16b28a | |
bert-e | 05c628728d | |
Nicolas Humbert | 2a807dc4ef | |
Nicolas Humbert | 1f8b0a4032 | |
bert-e | 0dd7fe9875 | |
Mickael Bourgois | f7a6af8d9a | |
Mickael Bourgois | e6d0eff1a8 | |
Mickael Bourgois | 9d558351e7 | |
Mickael Bourgois | 68150da72e | |
Mickael Bourgois | 2b2c4bc50e | |
Mickael Bourgois | 3068086a97 | |
Mickael Bourgois | 0af7eb5530 | |
bert-e | 7e372b7bd5 | |
bert-e | a121810552 | |
bert-e | 9bf1bcc483 | |
Nicolas Humbert | 06402c6c94 | |
Nicolas Humbert | a6f3c82827 | |
Nicolas Humbert | f1891851b3 | |
bert-e | a1eed4fefb | |
Nicolas Humbert | 68204448a1 | |
Nicolas Humbert | 40e271f7e2 | |
bert-e | d8f7f18f5a | |
bert-e | 5f4d7afefb | |
bert-e | 2482fdfafc | |
bert-e | e151b3fff1 | |
Nicolas Humbert | b8bbdbbd81 | |
Nicolas Humbert | 46258bca74 | |
williamlardier | b6bc11881a | |
williamlardier | 648257612b | |
williamlardier | 7423fac674 | |
williamlardier | 9647043a02 | |
williamlardier | f9e1f91791 | |
williamlardier | 9c5bc2bfe0 | |
Jonathan Gramain | 1a0a981271 | |
bert-e | a45b2eb6a4 | |
bert-e | b00378d46d | |
Mickael Bourgois | 2c3bfb16ef | |
Jonathan Gramain | c72d8be223 | |
Jonathan Gramain | f63cb3c762 | |
bert-e | 15fd621c5c | |
bert-e | effbf63dd4 | |
bert-e | 285fe2f63b | |
bert-e | 1d8ebe6a9c | |
bert-e | 00555597e0 | |
bert-e | bddc2ccd01 | |
Jonathan Gramain | 7908654b51 | |
Jonathan Gramain | 0d7cf8d40a | |
Jonathan Gramain | c4c75e976c | |
Jonathan Gramain | 1266a14253 | |
williamlardier | 851c72bd0f | |
bert-e | 722b6ae699 | |
bert-e | 29925a15ad | |
williamlardier | 6b64f50450 | |
Jonathan Gramain | 8dc3ba7ca6 | |
bert-e | 3c2283b062 | |
Jonathan Gramain | a6a76acede | |
Jonathan Gramain | 6a116734a9 | |
Jonathan Gramain | 9325ea4996 | |
Jonathan Gramain | 33ba89f0cf | |
Jonathan Gramain | c67331d350 | |
Jonathan Gramain | 6d6f1860ef | |
Nicolas Humbert | cbe6a5e2d6 | |
Mickael Bourgois | be1557d972 | |
Mickael Bourgois | a03463061c | |
Mickael Bourgois | 8ad0ea73a7 | |
Mickael Bourgois | a94040d13b | |
Mickael Bourgois | f265ed6122 | |
Mickael Bourgois | 7301c706fd | |
Mickael Bourgois | bfc8dee559 | |
Frédéric Meinnel | 5a5ef7c572 | |
Frédéric Meinnel | 918c2c5473 | |
Frédéric Meinnel | 29f39ab480 | |
Frédéric Meinnel | b7ac7f4616 | |
Frédéric Meinnel | f8ce90f9c3 | |
Frédéric Meinnel | 5734d11cf1 | |
Frédéric Meinnel | 4da59769d2 | |
Frédéric Meinnel | 60573991ee | |
Jonathan Gramain | 6f58f9dd68 | |
Jonathan Gramain | 3b9c93be68 | |
Jonathan Gramain | 081af3e795 | |
bert-e | 042f541a45 | |
bert-e | 63bf2cb5b1 | |
bert-e | 39f42d9cb4 | |
Mickael Bourgois | 02f126f040 | |
bert-e | 1477a70e47 | |
Mickael Bourgois | 7233ec2635 | |
Mickael Bourgois | c4b44016bc | |
Mickael Bourgois | a78a84faa7 | |
Mickael Bourgois | c3ff6526a1 | |
Frédéric Meinnel | 59d47a3e21 | |
Frédéric Meinnel | 6b61347c29 | |
Mickael Bourgois | 4bf29524eb | |
Mickael Bourgois | 9aa001c4d1 | |
Frédéric Meinnel | aea4663ff2 | |
Frédéric Meinnel | 5012e9209c | |
Frédéric Meinnel | 1568ad59c6 | |
bert-e | c2f6b45116 | |
bert-e | a0322b131c | |
Mickael Bourgois | b5487e3c94 | |
bert-e | 993b9e6093 | |
bert-e | ddd6c87831 | |
Mickael Bourgois | f2974cbd07 | |
bert-e | 7440794d93 | |
Mickael Bourgois | 1efab676bc | |
Mickael Bourgois | a167e1d5fa | |
Mickael Bourgois | c7e153917a | |
bert-e | 087369b37d | |
bert-e | 2d2030dfe4 | |
bert-e | 45cc4aa79e | |
Will Toozs | da80e12dab | |
Will Toozs | a7cf94d0fe | |
Jonathan Gramain | 2a82095d03 | |
Jonathan Gramain | 44b3d25459 | |
Jonathan Gramain | f1d6e30fb6 | |
Jonathan Gramain | 9186643caa | |
Jonathan Gramain | 485a76ceb9 | |
Jonathan Gramain | 00109a2c44 | |
Jonathan Gramain | aed1247825 | |
Jonathan Gramain | 0507c04ce9 | |
Will Toozs | 62736abba4 | |
Will Toozs | 97118f09c4 | |
Will Toozs | 5a84a8c0ad | |
bert-e | 37234efd14 | |
Jonathan Gramain | 2799381ef2 | |
Jonathan Gramain | a3f13e5387 | |
Jonathan Gramain | f4e83086d6 | |
Jonathan Gramain | d08a267965 | |
Jonathan Gramain | 063a2fb8fb | |
Jonathan Gramain | 1bc3360daf | |
Jonathan Gramain | 206f14bdf5 | |
Maha Benzekri | 74ff1691a0 | |
Maha Benzekri | 5ffae72693 | |
Maha Benzekri | 477a574500 | |
bert-e | 2a4ea38301 | |
bert-e | df4c22154e | |
Maha Benzekri | 3642ac03b2 | |
Francois Ferrand | d800179f86 | |
Francois Ferrand | c1c45a4af9 | |
Francois Ferrand | da536ed037 | |
Nicolas Humbert | 06901104e8 | |
Nicolas Humbert | a99a6d9d97 | |
Nicolas Humbert | 06244059a8 | |
Nicolas Humbert | 079f631711 | |
Benoit A. | 863f45d256 | |
KillianG | 4b642cf8b4 | |
KillianG | 2537f8aa9a | |
Maha Benzekri | 7866a1d06f | |
Maha Benzekri | 29ef2ef265 | |
Maha Benzekri | 1509f1bdfe | |
Maha Benzekri | 13d349d211 | |
Maha Benzekri | 34a32c967d | |
Maha Benzekri | 90ab985271 | |
Maha Benzekri | fbf5562a11 | |
bert-e | d79ed1b9c8 | |
bert-e | c34ad0dc31 | |
Maha Benzekri | df5ff0f400 | |
Maha Benzekri | 777783171a | |
Will Toozs | 39988e52e2 | |
Will Toozs | 79c82a4c3d | |
williamlardier | 17b5bbc233 | |
williamlardier | 4aa8b5cc6e | |
williamlardier | 5deed6c2e1 | |
Nicolas Humbert | af34571771 | |
Nicolas Humbert | 79b83a9067 | |
Nicolas Humbert | 5fd675a316 | |
Nicolas Humbert | d84cc974d3 | |
Maha Benzekri | dcf0f902ff | |
Maha Benzekri | 0177fbe98f | |
Maha Benzekri | f49cea3914 | |
Maha Benzekri | 73c6f41fa3 | |
bert-e | 5b66f8d089 | |
bert-e | b61d178b18 | |
Maha Benzekri | 9ea39c6ed9 | |
Florent Monjalet | e51b06cfea | |
Florent Monjalet | f2bc701f8c | |
Nicolas Humbert | 4d6b03ba47 | |
Nicolas Humbert | f03f049683 | |
Nicolas Humbert | d7b51de024 | |
Nicolas Humbert | cf51adf1c7 | |
Nicolas Humbert | 8a7c1be2d1 | |
Nicolas Humbert | c049df0a97 | |
Nicolas Humbert | 2b2667e29a | |
Nicolas Humbert | 8eb4a29c36 | |
bert-e | 862317703e | |
Nicolas Humbert | e69a97f240 | |
Nicolas Humbert | 81e838000f | |
bert-e | 547ce816e0 | |
Nicolas Humbert | 8256d6debf | |
bert-e | 15d5e93a2d | |
Nicolas Humbert | 69c1698eb7 | |
bert-e | d11bcb56e9 | |
Nicolas Humbert | c2cd90925f | |
bert-e | 0ed35c3d86 | |
bert-e | b1723594eb | |
Nicolas Humbert | c0218821ff | |
Nicolas Humbert | 49e32758fb | |
Nicolas Humbert | e13d0f5ed8 | |
Nicolas Humbert | 0d5907956f | |
Nicolas Humbert | f0c5d60ce9 | |
Nicolas Humbert | 8c2f4cf357 | |
Nicolas Humbert | f3f1da9bb3 | |
Nicolas Humbert | 036b75842e | |
Nicolas Humbert | 7ac5774635 | |
Nicolas Humbert | f3b928fce0 | |
Nicolas Humbert | 7173a357d9 | |
Nicolas Humbert | 7c4f461196 | |
Nicolas Humbert | 0a4d6f862f | |
bert-e | 8716fee67d | |
bert-e | 2938bb0c88 | |
williamlardier | 05c93446ab | |
williamlardier | 8d758327dd | |
williamlardier | be63c09624 | |
Nicolas Humbert | 4615875462 | |
Rahul Padigela | bdb59a0e63 | |
bert-e | a89d1d8d75 | |
Rahul Padigela | 89e5f7dffe | |
williamlardier | 57e84980c8 | |
williamlardier | 51bfd41bea | |
Nicolas Humbert | 96cbaeb821 | |
Nicolas Humbert | cb01346d07 | |
Nicolas Humbert | 3f24336b83 | |
Nicolas Humbert | 1e66518a79 | |
bert-e | 15b68fa9fa | |
Nicolas Humbert | 51703a65f5 | |
bert-e | 09aaa2d5ee | |
Nicolas Humbert | ad39d90b6f | |
Jonathan Gramain | 20e9fe4adb | |
bert-e | e9c67f7f67 | |
Jonathan Gramain | af3fd17ec2 | |
bert-e | 536d474f57 | |
bert-e | 55e68cfa17 | |
bert-e | 67c98fd81b | |
williamlardier | 5cd70d7cf1 | |
KillianG | 25be9014c9 | |
KillianG | ed42f24580 | |
KillianG | ce076cb3df | |
KillianG | 4bc3de52ff | |
bert-e | beb5f69be3 | |
bert-e | 5f3540a0d5 | |
bert-e | 654d628d39 | |
gaspardmoindrot | e8a409e337 | |
Alexander Chan | 4093bf2b04 | |
Alexander Chan | d0bb6d5b0c | |
bert-e | 3f7229eebe | |
bert-e | 7eb9d52da5 | |
Nicolas Humbert | e216c9dd20 | |
williamlardier | 0c1afe535b | |
williamlardier | 73335ae6ec | |
Alexander Chan | 99c514e8f2 | |
Alexander Chan | cfd9fdcfc4 | |
Alexander Chan | d809dac5e3 | |
williamlardier | 53dac8d233 | |
williamlardier | 6d5ef07eee | |
williamlardier | 272166e406 | |
williamlardier | 3af05e672b | |
williamlardier | 8b0c90cb2f | |
Alexander Chan | dfc9b761e2 | |
Alexander Chan | 04f1eb7f04 | |
bert-e | c204b90847 | |
bert-e | 78d6e7fd72 | |
Alexander Chan | 7768fa8d35 | |
KillianG | 4d9a9adc48 | |
KillianG | c4804e52ee | |
KillianG | 671cf3a679 | |
Jonathan Gramain | 9a5e27f97b | |
Jonathan Gramain | d744a709d2 | |
Jonathan Gramain | a9d003c6f8 | |
Jonathan Gramain | 99e04bd6fa | |
Jonathan Gramain | d3bdddeba3 | |
bert-e | 3252f7de03 | |
Jonathan Gramain | c4cc5a2c3d | |
Jonathan Gramain | fedd0190cc | |
Jonathan Gramain | 56fd4ad734 | |
Jonathan Gramain | ebe6b65fcf | |
Nicolas Humbert | 7994bf7b96 | |
Nicolas Humbert | 4be0a06c4a | |
bert-e | da7dbdc51f | |
Will Toozs | 2103ef1237 | |
Will Toozs | dbc1c54246 | |
bert-e | 6c22f8404d | |
KillianG | 00e03f0592 | |
KillianG | d453758b7d | |
KillianG | a964dc99c3 | |
Jonathan Gramain | 3a4da1d7c0 | |
williamlardier | 5074e6c0a4 | |
williamlardier | bd05dd6918 | |
williamlardier | fbda12ce3c | |
Nicolas Humbert | b02934bb39 | |
Nicolas Humbert | c9a444969b | |
Nicolas Humbert | 5d018860ec | |
bert-e | 5838e02096 | |
Nicolas Humbert | ecd600ac4b | |
Naren | ab0324da05 | |
Naren | 2b353b33af | |
Naren | 5377b20ceb | |
Naren | 21b329b301 | |
Naren | bd76402586 | |
bert-e | fd57f47be1 | |
bert-e | 94edf8be70 | |
Naren | 1d104345fd | |
Jonathan Gramain | 58e47e5015 | |
Jonathan Gramain | 4d782ecec6 | |
Jonathan Gramain | 655a10ce52 | |
Jonathan Gramain | 0c7f0e607d | |
Jonathan Gramain | caa5d53e9b | |
Jonathan Gramain | 21da975187 | |
bert-e | e0df67a115 | |
Naren | 7e18ae77e0 | |
Naren | 4750118f85 | |
Naren | c273c8b823 | |
Jonathan Gramain | d3b50fafa8 | |
Naren | 47e68a9b60 | |
Naren | bd0a199ffa | |
Naren | 4b1f69bcbb | |
Naren | e3a6814e3f | |
Alexander Chan | bf4072151f | |
Alexander Chan | f33cd69e45 | |
Alexander Chan | acd13ff31b | |
Alexander Chan | bb3e5d078f | |
Jonathan Gramain | 22fa04b7e7 | |
Jonathan Gramain | 10a94a0a96 | |
bert-e | 4d71a834d5 | |
Alexander Chan | 054f61d6c1 | |
Alexander Chan | fa26a487f5 | |
Alexander Chan | c1dd2e4946 | |
Alexander Chan | a714103b82 | |

@@ -1 +1,6 @@
-{ "extends": "scality" }
+{
+    "extends": "scality",
+    "parserOptions": {
+        "ecmaVersion": 2020
+    }
+}

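The `parserOptions.ecmaVersion: 2020` bump lets the linter parse ES2020 syntax such as optional chaining and nullish coalescing. A minimal illustration (the `config` object below is made up for the example, not an Arsenal identifier):

```typescript
// ES2020 syntax that ecmaVersion 2020 allows the parser to accept.
const config: { locationConstraint?: string } | undefined = undefined;
const region = config?.locationConstraint ?? 'us-east-1';
console.log(region); // 'us-east-1'
```
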
@@ -0,0 +1,25 @@
+---
+name: codeQL
+
+on:
+  push:
+    branches: [development/*, stabilization/*, hotfix/*]
+  pull_request:
+    branches: [development/*, stabilization/*, hotfix/*]
+  workflow_dispatch:
+
+jobs:
+  analyze:
+    name: Static analysis with CodeQL
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Initialize CodeQL
+        uses: github/codeql-action/init@v3
+        with:
+          languages: javascript, typescript
+
+      - name: Build and analyze
+        uses: github/codeql-action/analyze@v3

@@ -0,0 +1,16 @@
+---
+name: dependency review
+
+on:
+  pull_request:
+    branches: [development/*, stabilization/*, hotfix/*]
+
+jobs:
+  dependency-review:
+    runs-on: ubuntu-latest
+    steps:
+      - name: 'Checkout Repository'
+        uses: actions/checkout@v4
+
+      - name: 'Dependency Review'
+        uses: actions/dependency-review-action@v4

@@ -25,8 +25,8 @@ jobs:
           - 6379:6379
     steps:
       - name: Checkout
-        uses: actions/checkout@v2
-      - uses: actions/setup-node@v2
+        uses: actions/checkout@v4
+      - uses: actions/setup-node@v4
         with:
           node-version: '16'
           cache: 'yarn'

@@ -46,7 +46,9 @@ jobs:
         run: yarn --silent coverage
       - name: run functional tests
         run: yarn ft_test
-      - uses: codecov/codecov-action@v2
+      - uses: codecov/codecov-action@v4
+        with:
+          token: ${{ secrets.CODECOV_TOKEN }}
       - name: run executables tests
         run: yarn install && yarn test
         working-directory: 'lib/executables/pensieveCreds/'

@@ -57,9 +59,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
      - name: Install NodeJS
-        uses: actions/setup-node@v2
+        uses: actions/setup-node@v4
         with:
           node-version: '16'
           cache: yarn

@@ -70,7 +72,7 @@ jobs:
         run: yarn build
         continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
       - name: Upload artifacts
-        uses: scality/action-artifacts@v2
+        uses: scality/action-artifacts@v4
         with:
           url: https://artifacts.scality.net
           user: ${{ secrets.ARTIFACTS_USER }}

@@ -0,0 +1,12 @@
+{
+    "$schema": "https://swc.rs/schema.json",
+    "jsc": {
+        "parser": {
+            "syntax": "typescript"
+        },
+        "target": "es2017"
+    },
+    "module": {
+        "type": "commonjs"
+    }
+}

@@ -245,4 +245,16 @@ For capacity-enabled buckets, contains the following data:
 
 ### Usage
 
 Used to store bucket tagging
+
+## Model version 17
+
+### Properties Added
+
+```javascript
+this._quotaMax = quotaMax || 0;
+```
+
+### Usage
+
+Used to store bucket quota

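Model version 17 adds a single field, and the `|| 0` default is what keeps older bucket metadata readable: records serialized before this version deserialize with a quota of 0, meaning "no quota". A minimal sketch of the pattern, using illustrative names rather than Arsenal's actual bucket-metadata API:

```typescript
// Sketch of a bucket-metadata model carrying the new quota field.
// BucketMetadataSketch and its fields are illustrative; the real model
// class has many more properties and validations.
class BucketMetadataSketch {
    private _name: string;
    private _quotaMax: number;

    constructor(name: string, quotaMax?: number) {
        this._name = name;
        // Model version 17: default to 0 ("no quota") so that metadata
        // written under older model versions loads unchanged.
        this._quotaMax = quotaMax || 0;
    }

    getQuota(): number {
        return this._quotaMax;
    }

    setQuota(quotaMax: number): this {
        this._quotaMax = quotaMax;
        return this;
    }

    serialize(): string {
        return JSON.stringify({ name: this._name, quotaMax: this._quotaMax });
    }
}

// A record written before model version 17 has no quotaMax: it loads with 0.
const legacy = new BucketMetadataSketch('my-bucket');
console.log(legacy.getQuota()); // 0
console.log(new BucketMetadataSketch('b', 5000000).serialize());
```
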
@@ -0,0 +1,27 @@
+# Delimiter
+
+The Delimiter class handles raw listings from the database with an
+optional delimiter, and fills in a curated listing with "Contents" and
+"CommonPrefixes" as a result.
+
+## Expected Behavior
+
+- only lists keys belonging to the given **prefix** (if provided)
+
+- groups listed keys that have a common prefix ending with a delimiter
+  inside CommonPrefixes
+
+- can take a **marker** or **continuationToken** to list from a specific key
+
+- can take a **maxKeys** parameter to limit how many keys can be returned
+
+## State Chart
+
+- States with grey background are *Idle* states, which are waiting for
+  a new listing key
+
+- States with blue background are *Processing* states, which are
+  actively processing a new listing key passed by the filter()
+  function
+
+![Delimiter State Chart](./pics/delimiterStateChart.svg)

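To make the state chart concrete, here is a rough TypeScript sketch of a delimiter filter that builds `Contents` and `CommonPrefixes` from a sorted key stream, following the FILTER_ACCEPT / FILTER_SKIP / FILTER_END contract described above. It is a simplified illustration (no marker or continuationToken handling), not Arsenal's actual Delimiter class:

```typescript
// Simplified delimiter listing filter; illustrative only.
const FILTER_ACCEPT = 1;
const FILTER_SKIP = 0;
const FILTER_END = -1;

class DelimiterSketch {
    contents: { key: string; value: string }[] = [];
    commonPrefixes: string[] = [];
    private skippingPrefix?: string;

    constructor(private prefix: string = '',
                private delimiter: string = '',
                private maxKeys: number = 1000) {}

    private nKeys(): number {
        return this.contents.length + this.commonPrefixes.length;
    }

    filter(key: string, value: string): number {
        if (this.nKeys() >= this.maxKeys) {
            return FILTER_END;
        }
        if (!key.startsWith(this.prefix)) {
            return FILTER_SKIP;
        }
        // While inside an already-emitted common prefix, skip its keys.
        if (this.skippingPrefix && key.startsWith(this.skippingPrefix)) {
            return FILTER_SKIP;
        }
        this.skippingPrefix = undefined;
        if (this.delimiter) {
            const idx = key.indexOf(this.delimiter, this.prefix.length);
            if (idx !== -1) {
                // Key has a delimiter past the prefix: group it.
                this.skippingPrefix = key.slice(0, idx + this.delimiter.length);
                this.commonPrefixes.push(this.skippingPrefix);
                return FILTER_ACCEPT;
            }
        }
        this.contents.push({ key, value });
        return FILTER_ACCEPT;
    }
}

// Usage: keys arrive in sorted order from the database scan.
const listing = new DelimiterSketch('photos/', '/', 1000);
['photos/2023/a.jpg', 'photos/2023/b.jpg', 'photos/cover.jpg']
    .forEach(k => listing.filter(k, '{}'));
console.log(listing.contents.map(e => e.key)); // [ 'photos/cover.jpg' ]
console.log(listing.commonPrefixes);           // [ 'photos/2023/' ]
```

Returning FILTER_SKIP for keys under an already-emitted common prefix, rather than FILTER_ACCEPT, is what lets the caller jump past the rest of that prefix in the underlying scan instead of examining every key under it.
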
@@ -0,0 +1,45 @@
+# DelimiterMaster
+
+The DelimiterMaster class handles raw listings from the database of a
+versioned or non-versioned bucket with an optional delimiter, and
+fills in a curated listing with "Contents" and "CommonPrefixes" as a
+result.
+
+## Expected Behavior
+
+- only lists latest versions of versioned buckets
+
+- only lists keys belonging to the given **prefix** (if provided)
+
+- does not list latest versions that are delete markers
+
+- groups listed keys that have a common prefix ending with a delimiter
+  inside CommonPrefixes
+
+- can take a **marker** or **continuationToken** to list from a specific key
+
+- can take a **maxKeys** parameter to limit how many keys can be returned
+
+- reconciles internal PHD keys with the next version (those are
+  created when a specific version that is the latest version is
+  deleted)
+
+- skips internal keys like replay keys
+
+## State Chart
+
+- States with grey background are *Idle* states, which are waiting for
+  a new listing key
+
+- States with blue background are *Processing* states, which are
+  actively processing a new listing key passed by the filter()
+  function
+
+### Bucket Vformat=v0
+
+![DelimiterMaster State Chart for v0 format](./pics/delimiterMasterV0StateChart.svg)
+
+### Bucket Vformat=v1
+
+For buckets in versioning key format **v1**, the algorithm used is the
+one from [Delimiter](delimiter.md).

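The v0 state chart above is mostly about deciding which raw keys are listable master keys. The sketch below isolates that decision (version keys, delete markers, PHD placeholders) and leaves out the prefix/delimiter/maxKeys handling already sketched for Delimiter. It assumes a v0-style layout where a version key is the object key followed by a '\0' separator and a version id, and where values are JSON with optional isDeleteMarker / isPHD flags; this is an illustration, not the actual DelimiterMaster code:

```typescript
// Illustrative master-key decision logic for a v0 listing.
const VID_SEP = '\0'; // assumed v0 separator between object key and version id

type Decision = 'accept-master' | 'skip';

class MasterKeyFilterSketch {
    // Set after seeing a PHD master key: the next version key of the same
    // object carries the actual latest version.
    private phdKey?: string;

    filter(key: string, value: string): Decision {
        const sepIdx = key.indexOf(VID_SEP);
        const isVersionKey = sepIdx !== -1;
        const masterKey = isVersionKey ? key.slice(0, sepIdx) : key;

        if (this.phdKey !== undefined) {
            const resolvesPhd = isVersionKey && masterKey === this.phdKey;
            this.phdKey = undefined;
            if (resolvesPhd) {
                // Reconcile the PHD with its next version: list it as master.
                return 'accept-master';
            }
        }
        if (isVersionKey) {
            // Plain version keys never show up in a master listing.
            return 'skip';
        }
        const md = JSON.parse(value) as { isPHD?: boolean; isDeleteMarker?: boolean };
        if (md.isPHD) {
            this.phdKey = masterKey;
            return 'skip';
        }
        if (md.isDeleteMarker) {
            // The latest version is a delete marker: the object is not listed.
            return 'skip';
        }
        return 'accept-master';
    }
}

// Usage: a v0 scan interleaves master keys and version keys.
const f = new MasterKeyFilterSketch();
console.log(f.filter('doc.txt', '{}'));          // accept-master
console.log(f.filter('doc.txt\u0000v1', '{}'));  // skip (version of a listed master)
console.log(f.filter('tmp', '{"isPHD":true}'));  // skip (placeholder)
console.log(f.filter('tmp\u0000v2', '{}'));      // accept-master (reconciled PHD)
```
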
@@ -0,0 +1,33 @@
+# DelimiterVersions
+
+The DelimiterVersions class handles raw listings from the database of a
+versioned or non-versioned bucket with an optional delimiter, and
+fills in a curated listing with "Versions" and "CommonPrefixes" as a
+result.
+
+## Expected Behavior
+
+- lists individual distinct versions of versioned buckets
+
+- only lists keys belonging to the given **prefix** (if provided)
+
+- groups listed keys that have a common prefix ending with a delimiter
+  inside CommonPrefixes
+
+- can take a **keyMarker** and optionally a **versionIdMarker** to
+  list from a specific key or version
+
+- can take a **maxKeys** parameter to limit how many keys can be returned
+
+- skips internal keys like replay keys
+
+## State Chart
+
+- States with grey background are *Idle* states, which are waiting for
+  a new listing key
+
+- States with blue background are *Processing* states, which are
+  actively processing a new listing key passed by the filter()
+  function
+
+![DelimiterVersions State Chart](./pics/delimiterVersionsStateChart.svg)

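Compared to the master listing, DelimiterVersions resumes from a (**keyMarker**, **versionIdMarker**) pair rather than a single marker. A small sketch of the comparison used while skipping up to that position, assuming entries arrive ordered by key and then by version id in listing order (names are illustrative):

```typescript
// Decide whether a (key, versionId) entry lies after the marker position.
interface VersionEntry {
    key: string;
    versionId: string;
}

function isAfterMarker(entry: VersionEntry,
                       keyMarker: string,
                       versionIdMarker?: string): boolean {
    if (entry.key !== keyMarker) {
        return entry.key > keyMarker;
    }
    // Same key: without a versionIdMarker, every version of the marker key
    // is skipped; with one, only versions up to and including it are skipped.
    return versionIdMarker !== undefined && entry.versionId > versionIdMarker;
}

// Usage
console.log(isAfterMarker({ key: 'b', versionId: 'v2' }, 'b', 'v1')); // true
console.log(isAfterMarker({ key: 'b', versionId: 'v1' }, 'b', 'v1')); // false
console.log(isAfterMarker({ key: 'a', versionId: 'v9' }, 'b'));       // false
```
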
@@ -0,0 +1,45 @@
+digraph {
+    node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
+    edge [fontsize=14];
+    rankdir=TB;
+
+    START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
+    END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]
+
+    node [fillcolor="lightgrey"];
+    "NotSkippingPrefixNorVersions.Idle" [label="NotSkippingPrefixNorVersions",group="NotSkippingPrefixNorVersions",width=4];
+    "SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];
+    "SkippingVersions.Idle" [label="SkippingVersions",group="SkippingVersions"];
+    "WaitVersionAfterPHD.Idle" [label="WaitVersionAfterPHD",group="WaitVersionAfterPHD"];
+
+    node [fillcolor="lightblue"];
+    "NotSkippingPrefixNorVersions.Processing" [label="NotSkippingPrefixNorVersions",group="NotSkippingPrefixNorVersions",width=4];
+    "SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];
+    "SkippingVersions.Processing" [label="SkippingVersions",group="SkippingVersions"];
+    "WaitVersionAfterPHD.Processing" [label="WaitVersionAfterPHD",group="WaitVersionAfterPHD"];
+
+    START -> "SkippingVersions.Idle" [label="[marker != undefined]"]
+    START -> "NotSkippingPrefixNorVersions.Idle" [label="[marker == undefined]"]
+
+    "NotSkippingPrefixNorVersions.Idle" -> "NotSkippingPrefixNorVersions.Processing" [label="filter(key, value)"]
+    "SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]
+    "SkippingVersions.Idle" -> "SkippingVersions.Processing" [label="filter(key, value)"]
+    "WaitVersionAfterPHD.Idle" -> "WaitVersionAfterPHD.Processing" [label="filter(key, value)"]
+
+
+    "NotSkippingPrefixNorVersions.Processing" -> "SkippingVersions.Idle" [label="[Version.isDeleteMarker(value)]\n-> FILTER_ACCEPT"]
+    "NotSkippingPrefixNorVersions.Processing" -> "WaitVersionAfterPHD.Idle" [label="[Version.isPHD(value)]\n-> FILTER_ACCEPT"]
+    "NotSkippingPrefixNorVersions.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(<ReplayPrefix>)]\n/ prefix <- <ReplayPrefix>\n-> FILTER_SKIP"]
+    "NotSkippingPrefixNorVersions.Processing" -> END [label="[isListableKey(key, value) and\nKeys == maxKeys]\n-> FILTER_END"]
+    "NotSkippingPrefixNorVersions.Processing" -> "SkippingPrefix.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nhasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
+    "NotSkippingPrefixNorVersions.Processing" -> "SkippingVersions.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nnot hasDelimiter(key)]\n/ Contents.append(key, value)\n-> FILTER_ACCEPT"]
+
+    "SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
+    "SkippingPrefix.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not key.startsWith(prefix)]"]
+
+    "SkippingVersions.Processing" -> "SkippingVersions.Idle" [label="[isVersionKey(key)]\n-> FILTER_SKIP"]
+    "SkippingVersions.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not isVersionKey(key)]"]
+
+    "WaitVersionAfterPHD.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[isVersionKey(key) and master(key) == PHDkey]\n/ key <- master(key)"]
+    "WaitVersionAfterPHD.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not isVersionKey(key) or master(key) != PHDkey]"]
+}

@@ -0,0 +1,216 @@
[Generated SVG added (18 KiB, 2313pt x 460pt): the graphviz 2.43.0 rendering of the DelimiterMaster v0 state chart produced from the .dot source above; the SVG markup is omitted here.]

@@ -0,0 +1,35 @@
+digraph {
+    node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
+    edge [fontsize=14];
+    rankdir=TB;
+
+    START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
+    END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]
+
+    node [fillcolor="lightgrey"];
+    "NotSkipping.Idle" [label="NotSkipping",group="NotSkipping"];
+    "NeverSkipping.Idle" [label="NeverSkipping",group="NeverSkipping"];
+    "NotSkippingPrefix.Idle" [label="NotSkippingPrefix",group="NotSkippingPrefix"];
+    "SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];
+
+    node [fillcolor="lightblue"];
+    "NeverSkipping.Processing" [label="NeverSkipping",group="NeverSkipping"];
+    "NotSkippingPrefix.Processing" [label="NotSkippingPrefix",group="NotSkippingPrefix"];
+    "SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];
+
+    START -> "NotSkipping.Idle"
+    "NotSkipping.Idle" -> "NeverSkipping.Idle" [label="[delimiter == undefined]"]
+    "NotSkipping.Idle" -> "NotSkippingPrefix.Idle" [label="[delimiter == '/']"]
+
+    "NeverSkipping.Idle" -> "NeverSkipping.Processing" [label="filter(key, value)"]
+    "NotSkippingPrefix.Idle" -> "NotSkippingPrefix.Processing" [label="filter(key, value)"]
+    "SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]
+
+    "NeverSkipping.Processing" -> END [label="[nKeys == maxKeys]\n-> FILTER_END"]
+    "NeverSkipping.Processing" -> "NeverSkipping.Idle" [label="[nKeys < maxKeys]\n/ Contents.append(key, value)\n -> FILTER_ACCEPT"]
+    "NotSkippingPrefix.Processing" -> END [label="[nKeys == maxKeys]\n -> FILTER_END"]
+    "NotSkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[nKeys < maxKeys and hasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
+    "NotSkippingPrefix.Processing" -> "NotSkippingPrefix.Idle" [label="[nKeys < maxKeys and not hasDelimiter(key)]\n/ Contents.append(key, value)\n -> FILTER_ACCEPT"]
+    "SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
+    "SkippingPrefix.Processing" -> "NotSkippingPrefix.Processing" [label="[not key.startsWith(prefix)]"]
+}

@@ -0,0 +1,166 @@
[Generated SVG added (975pt x 533pt): the graphviz 2.43.0 rendering of the Delimiter state chart produced from the .dot source above; the SVG markup is omitted here.]
|
||||||
|
<path fill="none" stroke="black" d="M703.16,-31.64C728.6,-36.87 750.75,-44.11 759,-54 778.46,-77.34 776.26,-200.01 762,-216 749.37,-230.17 656.13,-239.42 576.2,-244.84"/>
|
||||||
|
<polygon fill="black" stroke="black" points="575.77,-241.36 566.03,-245.51 576.24,-248.34 575.77,-241.36"/>
|
||||||
|
<text text-anchor="middle" x="870" y="-116.3" font-family="Times,serif" font-size="14.00">[not key.startsWith(prefix)]</text>
|
||||||
|
</g>
|
||||||
|
</g>
|
||||||
|
</svg>
|
After Width: | Height: | Size: 12 KiB |
@ -0,0 +1,50 @@
digraph {
    node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
    edge [fontsize=14];
    rankdir=TB;

    START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
    END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]

    node [fillcolor="lightgrey"];
    "NotSkipping.Idle" [label="NotSkipping",group="NotSkipping",width=4];
    "SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];
    "WaitForNullKey.Idle" [label="WaitForNullKey",group="WaitForNullKey"];
    "SkippingVersions.Idle" [label="SkippingVersions",group="SkippingVersions"];

    node [fillcolor="lightblue"];
    "NotSkipping.Processing" [label="NotSkipping",group="NotSkipping",width=4];
    "NotSkippingV0.Processing" [label="NotSkippingV0",group="NotSkipping",width=4];
    "NotSkippingV1.Processing" [label="NotSkippingV1",group="NotSkipping",width=4];
    "NotSkippingCommon.Processing" [label="NotSkippingCommon",group="NotSkipping",width=4];
    "SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];
    "WaitForNullKey.Processing" [label="WaitForNullKey",group="WaitForNullKey"];
    "SkippingVersions.Processing" [label="SkippingVersions",group="SkippingVersions"];

    START -> "WaitForNullKey.Idle" [label="[versionIdMarker != undefined]"]
    START -> "NotSkipping.Idle" [label="[versionIdMarker == undefined]"]

    "NotSkipping.Idle" -> "NotSkipping.Processing" [label="filter(key, value)"]
    "SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]
    "WaitForNullKey.Idle" -> "WaitForNullKey.Processing" [label="filter(key, value)"]
    "SkippingVersions.Idle" -> "SkippingVersions.Processing" [label="filter(key, value)"]

    "NotSkipping.Processing" -> "NotSkippingV0.Processing" [label="vFormat='v0'"]
    "NotSkipping.Processing" -> "NotSkippingV1.Processing" [label="vFormat='v1'"]

    "WaitForNullKey.Processing" -> "NotSkipping.Processing" [label="master(key) != keyMarker"]
    "WaitForNullKey.Processing" -> "SkippingVersions.Processing" [label="master(key) == keyMarker"]
    "NotSkippingV0.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(<ReplayPrefix>)]\n/ prefix <- <ReplayPrefix>\n-> FILTER_SKIP"]
    "NotSkippingV0.Processing" -> "NotSkipping.Idle" [label="[Version.isPHD(value)]\n-> FILTER_ACCEPT"]
    "NotSkippingV0.Processing" -> "NotSkippingCommon.Processing" [label="[not key.startsWith(<ReplayPrefix>)\nand not Version.isPHD(value)]"]
    "NotSkippingV1.Processing" -> "NotSkippingCommon.Processing" [label="[always]"]
    "NotSkippingCommon.Processing" -> END [label="[isListableKey(key, value) and\nnKeys == maxKeys]\n-> FILTER_END"]
    "NotSkippingCommon.Processing" -> "SkippingPrefix.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nhasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
    "NotSkippingCommon.Processing" -> "NotSkipping.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nnot hasDelimiter(key)]\n/ Contents.append(key, versionId, value)\n-> FILTER_ACCEPT"]

    "SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
    "SkippingPrefix.Processing" -> "NotSkipping.Processing" [label="[not key.startsWith(prefix)]"]
    "SkippingVersions.Processing" -> "NotSkipping.Processing" [label="master(key) !== keyMarker or \nversionId > versionIdMarker"]
    "SkippingVersions.Processing" -> "SkippingVersions.Idle" [label="master(key) === keyMarker and \nversionId < versionIdMarker\n-> FILTER_SKIP"]
    "SkippingVersions.Processing" -> "SkippingVersions.Idle" [label="master(key) === keyMarker and \nversionId == versionIdMarker\n-> FILTER_ACCEPT"]
}
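A minimal driver-loop sketch for the state machine above. The FILTER_* numeric values, the ListingExtension shape and the skipping() helper are assumptions added for illustration; only filter(key, value) and the FILTER_ACCEPT / FILTER_SKIP / FILTER_END outcomes come from the diagram itself:

    // Hypothetical driver loop consuming the filter() results of a listing extension.
    const FILTER_ACCEPT = 1;   // assumed encoding, for illustration only
    const FILTER_SKIP = 0;
    const FILTER_END = -1;

    type ListingExtension = {
        filter(entry: { key: string, value: string }): number;
        skipping(): string | undefined;  // assumed helper returning the key to resume from
    };

    function drive(extension: ListingExtension, entries: { key: string, value: string }[]): void {
        for (const entry of entries) {
            const res = extension.filter(entry);
            if (res === FILTER_END) {
                // [nKeys == maxKeys]: the listing is complete, stop scanning
                break;
            }
            if (res === FILTER_SKIP) {
                // the extension entered a Skipping* state: ask it where to resume
                const resumeAt = extension.skipping();
                // ...reposition the scan at 'resumeAt' to skip the whole prefix/version range...
            }
            // FILTER_ACCEPT: the entry was added to Contents or CommonPrefixes
        }
    }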
@ -0,0 +1,265 @@
[Graphviz-generated SVG (1522pt x 922pt) rendering the versions listing state machine defined by the .dot source above: START/END plus the WaitForNullKey, NotSkipping, NotSkippingV0, NotSkippingV1, NotSkippingCommon, SkippingPrefix and SkippingVersions states with their filter(key, value) transitions.]
After Width: | Height: | Size: 21 KiB
index.ts (12 changes)
@ -1,6 +1,9 @@
 import * as evaluators from './lib/policyEvaluator/evaluator';
 import evaluatePrincipal from './lib/policyEvaluator/principal';
-import RequestContext from './lib/policyEvaluator/RequestContext';
+import RequestContext, {
+    actionNeedQuotaCheck,
+    actionNeedQuotaCheckCopy,
+    actionWithDataDeletion } from './lib/policyEvaluator/RequestContext';
 import * as requestUtils from './lib/policyEvaluator/requestUtils';
 import * as actionMaps from './lib/policyEvaluator/utils/actionMaps';
 import { validateUserPolicy } from './lib/policy/policyValidator'
@ -25,6 +28,7 @@ import * as objectRestore from './lib/s3middleware/objectRestore';
 import * as lifecycleHelpers from './lib/s3middleware/lifecycleHelpers';
 export { default as errors } from './lib/errors';
 export { default as Clustering } from './lib/Clustering';
+export * as ClusterRPC from './lib/clustering/ClusterRPC';
 export * as ipCheck from './lib/ipCheck';
 export * as auth from './lib/auth/auth';
 export * as constants from './lib/constants';
@ -48,12 +52,15 @@ export const algorithms = {
         Skip: require('./lib/algos/list/skip'),
     },
     cache: {
+        GapSet: require('./lib/algos/cache/GapSet'),
+        GapCache: require('./lib/algos/cache/GapCache'),
         LRUCache: require('./lib/algos/cache/LRUCache'),
     },
     stream: {
         MergeStream: require('./lib/algos/stream/MergeStream'),
     },
     SortedSet: require('./lib/algos/set/SortedSet'),
+    Heap: require('./lib/algos/heap/Heap'),
 };

 export const policies = {
@ -63,6 +70,9 @@ export const policies = {
     RequestContext,
     requestUtils,
     actionMaps,
+    actionNeedQuotaCheck,
+    actionWithDataDeletion,
+    actionNeedQuotaCheckCopy,
 };

 export const testing = {
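A consumer-side sketch of the additions above; the 'arsenal' import name is an assumption (this file is the package entry point), and the destructuring is shown only for illustration:

    import { algorithms, policies } from 'arsenal';

    // the cache algorithms object now carries the two new modules added above
    const { GapSet, GapCache } = algorithms.cache;

    // the policies object now re-exports the three action maps from RequestContext
    const { actionNeedQuotaCheck, actionWithDataDeletion, actionNeedQuotaCheckCopy } = policies;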
@ -0,0 +1,363 @@
import { OrderedSet } from '@js-sdsl/ordered-set';
import {
    default as GapSet,
    GapSetEntry,
} from './GapSet';

// the API is similar but is not strictly a superset of GapSetInterface
// so we don't extend from it
export interface GapCacheInterface {
    exposureDelayMs: number;
    maxGapWeight: number;
    size: number;

    setGap: (firstKey: string, lastKey: string, weight: number) => void;
    removeOverlappingGaps: (overlappingKeys: string[]) => number;
    lookupGap: (minKey: string, maxKey?: string) => Promise<GapSetEntry | null>;
    [Symbol.iterator]: () => Iterator<GapSetEntry>;
    toArray: () => GapSetEntry[];
};

class GapCacheUpdateSet {
    newGaps: GapSet;
    updatedKeys: OrderedSet<string>;

    constructor(maxGapWeight: number) {
        this.newGaps = new GapSet(maxGapWeight);
        this.updatedKeys = new OrderedSet();
    }

    addUpdateBatch(updatedKeys: OrderedSet<string>): void {
        this.updatedKeys.union(updatedKeys);
    }
};

/**
 * Cache of listing "gaps", i.e. ranges of keys that can be skipped
 * over during listing (because they only contain delete markers as
 * latest versions).
 *
 * Typically, a single GapCache instance would be attached to a raft session.
 *
 * The API usage is as follows:
 *
 * - Initialize a GapCache instance by calling start() (this starts an internal timer)
 *
 * - Insert a gap or update an existing one via setGap()
 *
 * - Lookup existing gaps via lookupGap()
 *
 * - Invalidate gaps that overlap a specific set of keys via removeOverlappingGaps()
 *
 * - Shut down a GapCache instance by calling stop() (this stops the internal timer)
 *
 * Gaps inserted via setGap() are not exposed immediately to lookupGap(), but only:
 *
 * - after a certain delay always larger than 'exposureDelayMs' and usually shorter
 *   than twice this value (but might be slightly longer in rare cases)
 *
 * - and only if they haven't been invalidated by a recent call to removeOverlappingGaps()
 *
 * This ensures atomicity between gap creation and invalidation from updates under
 * the condition that a gap is created from first key to last key within the time defined
 * by 'exposureDelayMs'.
 *
 * The implementation is based on two extra temporary "update sets" on top of the main
 * exposed gap set, one called "staging" and the other "frozen", each containing a
 * temporary updated gap set and a list of updated keys to invalidate gaps with (coming
 * from calls to removeOverlappingGaps()). Every "exposureDelayMs" milliseconds, the frozen
 * gaps are invalidated by all key updates coming from either of the "staging" or "frozen"
 * update set, then merged into the exposed gaps set, after which the staging updates become
 * the frozen updates and won't receive any new gap until the next cycle.
 */
export default class GapCache implements GapCacheInterface {
    _exposureDelayMs: number;
    maxGaps: number;

    _stagingUpdates: GapCacheUpdateSet;
    _frozenUpdates: GapCacheUpdateSet;
    _exposedGaps: GapSet;
    _exposeFrozenInterval: NodeJS.Timeout | null;

    /**
     * @constructor
     *
     * @param {number} exposureDelayMs - minimum delay between
     *   insertion of a gap via setGap() and its exposure via
     *   lookupGap()
     * @param {number} maxGaps - maximum number of cached gaps, after
     *   which no new gap can be added by setGap(). (Note: a future
     *   improvement could replace this by an eviction strategy)
     * @param {number} maxGapWeight - maximum "weight" of individual
     *   cached gaps, which is also the granularity for
     *   invalidation. Individual gaps can be chained together,
     *   which lookupGap() transparently consolidates in the response
     *   into a single large gap.
     */
    constructor(exposureDelayMs: number, maxGaps: number, maxGapWeight: number) {
        this._exposureDelayMs = exposureDelayMs;
        this.maxGaps = maxGaps;

        this._stagingUpdates = new GapCacheUpdateSet(maxGapWeight);
        this._frozenUpdates = new GapCacheUpdateSet(maxGapWeight);
        this._exposedGaps = new GapSet(maxGapWeight);
        this._exposeFrozenInterval = null;
    }

    /**
     * Create a GapCache from an array of exposed gap entries (used in tests)
     *
     * @return {GapCache} - a new GapCache instance
     */
    static createFromArray(
        gaps: GapSetEntry[],
        exposureDelayMs: number,
        maxGaps: number,
        maxGapWeight: number
    ): GapCache {
        const gapCache = new GapCache(exposureDelayMs, maxGaps, maxGapWeight);
        gapCache._exposedGaps = GapSet.createFromArray(gaps, maxGapWeight)
        return gapCache;
    }

    /**
     * Internal helper to remove gaps in the staging and frozen sets
     * overlapping with previously updated keys, right before the
     * frozen gaps get exposed.
     *
     * @return {undefined}
     */
    _removeOverlappingGapsBeforeExpose(): void {
        for (const { updatedKeys } of [this._stagingUpdates, this._frozenUpdates]) {
            if (updatedKeys.size() === 0) {
                continue;
            }
            for (const { newGaps } of [this._stagingUpdates, this._frozenUpdates]) {
                if (newGaps.size === 0) {
                    continue;
                }
                newGaps.removeOverlappingGaps(updatedKeys);
            }
        }
    }

    /**
     * This function is the core mechanism that updates the exposed gaps in the
     * cache. It is called on a regular interval defined by 'exposureDelayMs'.
     *
     * It does the following in order:
     *
     * - remove gaps from the frozen set that overlap with any key present in a
     *   batch passed to removeOverlappingGaps() since the last two triggers of
     *   _exposeFrozen()
     *
     * - merge the remaining gaps from the frozen set to the exposed set, which
     *   makes them visible from calls to lookupGap()
     *
     * - rotate by freezing the currently staging updates and initiating a new
     *   staging updates set
     *
     * @return {undefined}
     */
    _exposeFrozen(): void {
        this._removeOverlappingGapsBeforeExpose();
        for (const gap of this._frozenUpdates.newGaps) {
            // Use a trivial strategy to keep the cache size within
            // limits: refuse to add new gaps when the size is above
            // the 'maxGaps' threshold. We solely rely on
            // removeOverlappingGaps() to make space for new gaps.
            if (this._exposedGaps.size < this.maxGaps) {
                this._exposedGaps.setGap(gap.firstKey, gap.lastKey, gap.weight);
            }
        }
        this._frozenUpdates = this._stagingUpdates;
        this._stagingUpdates = new GapCacheUpdateSet(this.maxGapWeight);
    }

    /**
     * Start the internal GapCache timer
     *
     * @return {undefined}
     */
    start(): void {
        if (this._exposeFrozenInterval) {
            return;
        }
        this._exposeFrozenInterval = setInterval(
            () => this._exposeFrozen(),
            this._exposureDelayMs);
    }

    /**
     * Stop the internal GapCache timer
     *
     * @return {undefined}
     */
    stop(): void {
        if (this._exposeFrozenInterval) {
            clearInterval(this._exposeFrozenInterval);
            this._exposeFrozenInterval = null;
        }
    }

    /**
     * Record a gap between two keys, associated with a weight to
     * limit individual gap's spanning ranges in the cache, for a more
     * granular invalidation.
     *
     * The function handles splitting and merging existing gaps to
     * maintain an optimal weight of cache entries.
     *
     * NOTE 1: the caller must ensure that the full length of the gap
     * between 'firstKey' and 'lastKey' has been built from a listing
     * snapshot that is more recent than 'exposureDelayMs' milliseconds,
     * in order to guarantee that the exposed gap will be fully
     * covered (and potentially invalidated) from recent calls to
     * removeOverlappingGaps().
     *
     * NOTE 2: a usual pattern when building a large gap from multiple
     * calls to setGap() is to start the next gap from 'lastKey',
     * which will be passed as 'firstKey' in the next call, so that
     * gaps can be chained together and consolidated by lookupGap().
     *
     * @param {string} firstKey - first key of the gap
     * @param {string} lastKey - last key of the gap, must be greater
     *   than or equal to 'firstKey'
     * @param {number} weight - total weight between 'firstKey' and 'lastKey'
     * @return {undefined}
     */
    setGap(firstKey: string, lastKey: string, weight: number): void {
        this._stagingUpdates.newGaps.setGap(firstKey, lastKey, weight);
    }

    /**
     * Remove gaps that overlap with a given set of keys. Used to
     * invalidate gaps when keys are inserted or deleted.
     *
     * @param {OrderedSet<string> | string[]} overlappingKeys - remove gaps that
     *   overlap with any of this set of keys
     * @return {number} - how many gaps were removed from the exposed
     *   gaps only (overlapping gaps not yet exposed are also invalidated
     *   but are not accounted for in the returned value)
     */
    removeOverlappingGaps(overlappingKeys: OrderedSet<string> | string[]): number {
        let overlappingKeysSet;
        if (Array.isArray(overlappingKeys)) {
            overlappingKeysSet = new OrderedSet(overlappingKeys);
        } else {
            overlappingKeysSet = overlappingKeys;
        }
        this._stagingUpdates.addUpdateBatch(overlappingKeysSet);
        return this._exposedGaps.removeOverlappingGaps(overlappingKeysSet);
    }

    /**
     * Lookup the next exposed gap that overlaps with [minKey, maxKey]. Internally
     * chained gaps are coalesced in the response into a single contiguous large gap.
     *
     * @param {string} minKey - minimum key overlapping with the returned gap
     * @param {string} [maxKey] - maximum key overlapping with the returned gap
     * @return {Promise<GapSetEntry | null>} - result of the lookup if a gap
     *   was found, null otherwise, as a Promise
     */
    lookupGap(minKey: string, maxKey?: string): Promise<GapSetEntry | null> {
        return this._exposedGaps.lookupGap(minKey, maxKey);
    }

    /**
     * Get the maximum weight setting for individual gaps.
     *
     * @return {number} - maximum weight of individual gaps
     */
    get maxGapWeight(): number {
        return this._exposedGaps.maxWeight;
    }

    /**
     * Set the maximum weight setting for individual gaps.
     *
     * @param {number} gapWeight - maximum weight of individual gaps
     */
    set maxGapWeight(gapWeight: number) {
        this._exposedGaps.maxWeight = gapWeight;
        // also update transient gap sets
        this._stagingUpdates.newGaps.maxWeight = gapWeight;
        this._frozenUpdates.newGaps.maxWeight = gapWeight;
    }

    /**
     * Get the exposure delay in milliseconds, which is the minimum
     * time after which newly cached gaps will be exposed by
     * lookupGap().
     *
     * @return {number} - exposure delay in milliseconds
     */
    get exposureDelayMs(): number {
        return this._exposureDelayMs;
    }

    /**
     * Set the exposure delay in milliseconds, which is the minimum
     * time after which newly cached gaps will be exposed by
     * lookupGap(). Setting this attribute automatically updates the
     * internal state to honor the new value.
     *
     * @param {number} - exposure delay in milliseconds
     */
    set exposureDelayMs(exposureDelayMs: number) {
        if (exposureDelayMs !== this._exposureDelayMs) {
            this._exposureDelayMs = exposureDelayMs;
            if (this._exposeFrozenInterval) {
                // invalidate all pending gap updates, as the new interval may not be
                // safe for them
                this._stagingUpdates = new GapCacheUpdateSet(this.maxGapWeight);
                this._frozenUpdates = new GapCacheUpdateSet(this.maxGapWeight);

                // reinitialize the _exposeFrozenInterval timer with the updated delay
                this.stop();
                this.start();
            }
        }
    }

    /**
     * Get the number of exposed gaps
     *
     * @return {number} number of exposed gaps
     */
    get size(): number {
        return this._exposedGaps.size;
    }

    /**
     * Iterate over exposed gaps
     *
     * @return {Iterator<GapSetEntry>} an iterator over exposed gaps
     */
    [Symbol.iterator](): Iterator<GapSetEntry> {
        return this._exposedGaps[Symbol.iterator]();
    }

    /**
     * Get an array of all exposed gaps
     *
     * @return {GapSetEntry[]} array of exposed gaps
     */
    toArray(): GapSetEntry[] {
        return this._exposedGaps.toArray();
    }

    /**
     * Clear all exposed and staging gaps from the cache.
     *
     * Note: retains invalidating updates from removeOverlappingGaps()
     * for correctness of gaps inserted afterwards.
     *
     * @return {undefined}
     */
    clear(): void {
        this._stagingUpdates.newGaps = new GapSet(this.maxGapWeight);
        this._frozenUpdates.newGaps = new GapSet(this.maxGapWeight);
        this._exposedGaps = new GapSet(this.maxGapWeight);
    }
}
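A minimal usage sketch of the GapCache class above; the key names and numeric settings are illustrative only, while the call sequence follows the "API usage" list in the class documentation:

    // exposureDelayMs=100, maxGaps=1000, maxGapWeight=500: arbitrary example settings
    const gapCache = new GapCache(100, 1000, 500);
    gapCache.start();

    // record a gap of 42 keys discovered while listing
    gapCache.setGap('bucket/key-0001', 'bucket/key-0042', 42);

    // invalidate any gap (staged or exposed) overlapping freshly written keys
    gapCache.removeOverlappingGaps(['bucket/key-0010']);

    // at least exposureDelayMs later (inside an async function), surviving gaps
    // become visible to lookups
    const gap = await gapCache.lookupGap('bucket/key-0000', 'bucket/key-9999');

    gapCache.stop();

Note that the lookupGap() call only sees the gap once at least one exposure cycle has elapsed and no overlapping key has been passed to removeOverlappingGaps() in the meantime.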
@ -0,0 +1,366 @@
import assert from 'assert';
import { OrderedSet } from '@js-sdsl/ordered-set';

import errors from '../../errors';

export type GapSetEntry = {
    firstKey: string,
    lastKey: string,
    weight: number,
};

export interface GapSetInterface {
    maxWeight: number;
    size: number;

    setGap: (firstKey: string, lastKey: string, weight: number) => GapSetEntry;
    removeOverlappingGaps: (overlappingKeys: string[]) => number;
    lookupGap: (minKey: string, maxKey?: string) => Promise<GapSetEntry | null>;
    [Symbol.iterator]: () => Iterator<GapSetEntry>;
    toArray: () => GapSetEntry[];
};

/**
 * Specialized data structure to support caching of listing "gaps",
 * i.e. ranges of keys that can be skipped over during listing
 * (because they only contain delete markers as latest versions)
 */
export default class GapSet implements GapSetInterface, Iterable<GapSetEntry> {
    _gaps: OrderedSet<GapSetEntry>;
    _maxWeight: number;

    /**
     * @constructor
     *
     * @param {number} maxWeight - weight threshold for each cached
     *   gap (unitless). Triggers splitting gaps when reached
     */
    constructor(maxWeight: number) {
        this._gaps = new OrderedSet(
            [],
            (left: GapSetEntry, right: GapSetEntry) => (
                left.firstKey < right.firstKey ? -1 :
                    left.firstKey > right.firstKey ? 1 : 0
            )
        );
        this._maxWeight = maxWeight;
    }

    /**
     * Create a GapSet from an array of gap entries (used in tests)
     */
    static createFromArray(gaps: GapSetEntry[], maxWeight: number): GapSet {
        const gapSet = new GapSet(maxWeight);
        for (const gap of gaps) {
            gapSet._gaps.insert(gap);
        }
        return gapSet;
    }

    /**
     * Record a gap between two keys, associated with a weight to limit
     * individual gap sizes in the cache.
     *
     * The function handles splitting and merging existing gaps to
     * maintain an optimal weight of cache entries.
     *
     * @param {string} firstKey - first key of the gap
     * @param {string} lastKey - last key of the gap, must be greater
     *   than or equal to 'firstKey'
     * @param {number} weight - total weight between 'firstKey' and 'lastKey'
     * @return {GapSetEntry} - existing or new gap entry
     */
    setGap(firstKey: string, lastKey: string, weight: number): GapSetEntry {
        assert(lastKey >= firstKey);

        // Step 1/4: Find the closest left-overlapping gap, and either re-use it
        // or chain it with a new gap depending on the weights if it exists (otherwise
        // just creates a new gap).
        const curGapIt = this._gaps.reverseLowerBound(<GapSetEntry>{ firstKey });
        let curGap;
        if (curGapIt.isAccessible()) {
            curGap = curGapIt.pointer;
            if (curGap.lastKey >= lastKey) {
                // return fully overlapping gap already cached
                return curGap;
            }
        }
        let remainingWeight = weight;
        if (!curGap // no previous gap
            || curGap.lastKey < firstKey // previous gap not overlapping
            || (curGap.lastKey === firstKey // previous gap overlapping by one key...
                && curGap.weight + weight > this._maxWeight) // ...but we can't extend it
        ) {
            // create a new gap indexed by 'firstKey'
            curGap = { firstKey, lastKey: firstKey, weight: 0 };
            this._gaps.insert(curGap);
        } else if (curGap.lastKey > firstKey && weight > this._maxWeight) {
            // previous gap is either fully or partially contained in the new gap
            // and cannot be extended: subtract its weight from the total (heuristic
            // in case the previous gap doesn't start at 'firstKey', which is the
            // uncommon case)
            remainingWeight -= curGap.weight;

            // there may be an existing chained gap starting with the previous gap's
            // 'lastKey': use it if it exists
            const chainedGapIt = this._gaps.find(<GapSetEntry>{ firstKey: curGap.lastKey });
            if (chainedGapIt.isAccessible()) {
                curGap = chainedGapIt.pointer;
            } else {
                // no existing chained gap: chain a new gap to the previous gap
                curGap = {
                    firstKey: curGap.lastKey,
                    lastKey: curGap.lastKey,
                    weight: 0,
                };
                this._gaps.insert(curGap);
            }
        }
        // Step 2/4: Cleanup existing gaps fully included in firstKey -> lastKey, and
        // aggregate their weights in curGap to define the minimum weight up to the
        // last merged gap.
        let nextGap;
        while (true) {
            const nextGapIt = this._gaps.upperBound(<GapSetEntry>{ firstKey: curGap.firstKey });
            nextGap = nextGapIt.isAccessible() && nextGapIt.pointer;
            // stop the cleanup when no more gap or if the next gap is not fully
            // included in curGap
            if (!nextGap || nextGap.lastKey > lastKey) {
                break;
            }
            this._gaps.eraseElementByIterator(nextGapIt);
            curGap.lastKey = nextGap.lastKey;
            curGap.weight += nextGap.weight;
        }

        // Step 3/4: Extend curGap to lastKey, adjusting the weight.
        // At this point, curGap weight is the minimum weight of the finished gap, save it
        // for step 4.
        let minMergedWeight = curGap.weight;
        if (curGap.lastKey === firstKey && firstKey !== lastKey) {
            // extend the existing gap by the full amount 'firstKey -> lastKey'
            curGap.lastKey = lastKey;
            curGap.weight += remainingWeight;
        } else if (curGap.lastKey <= lastKey) {
            curGap.lastKey = lastKey;
            curGap.weight = remainingWeight;
        }

        // Step 4/4: Find the closest right-overlapping gap, and if it exists, either merge
        // it or chain it with curGap depending on the weights.
        if (nextGap && nextGap.firstKey <= lastKey) {
            // nextGap overlaps with the new gap: check if we can merge it
            minMergedWeight += nextGap.weight;
            let mergedWeight;
            if (lastKey === nextGap.firstKey) {
                // nextGap is chained with curGap: add the full weight of nextGap
                mergedWeight = curGap.weight + nextGap.weight;
            } else {
                // strict overlap: don't add nextGap's weight unless
                // it's larger than the sum of merged ranges (as it is
                // then included in `minMergedWeight`)
                mergedWeight = Math.max(curGap.weight, minMergedWeight);
            }
            if (mergedWeight <= this._maxWeight) {
                // merge nextGap into curGap
                curGap.lastKey = nextGap.lastKey;
                curGap.weight = mergedWeight;
                this._gaps.eraseElementByKey(nextGap);
            } else {
                // adjust the last key to chain with nextGap and subtract the next
                // gap's weight from curGap (heuristic)
                curGap.lastKey = nextGap.firstKey;
                curGap.weight = Math.max(mergedWeight - nextGap.weight, 0);
                curGap = nextGap;
            }
        }
        // return a copy of curGap
        return Object.assign({}, curGap);
    }
|
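
    // Illustration (not part of the diff): a minimal sketch of how setGap()
    // behaves, assuming a GapSet created with a maximum weight of 100 (the
    // constructor call and the key names below are made up for illustration):
    //
    //   const gaps = new GapSet(100);
    //   gaps.setGap('bar', 'baz', 10);   // creates { bar -> baz, weight 10 }
    //   gaps.setGap('bar', 'qux', 80);   // extends it to { bar -> qux, weight 80 }
    //   gaps.setGap('qux', 'zod', 50);   // 80 + 50 > 100: cannot merge, so a new gap
    //                                    // { qux -> zod, weight 50 } is chained after
    //                                    // { bar -> qux, weight 80 }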

    /**
     * Remove gaps that overlap with one or more keys in a given array or
     * OrderedSet. Used to invalidate gaps when keys are inserted or deleted.
     *
     * @param {OrderedSet<string> | string[]} overlappingKeys - remove gaps that overlap
     * with any of this set of keys
     * @return {number} - how many gaps were removed
     */
    removeOverlappingGaps(overlappingKeys: OrderedSet<string> | string[]): number {
        // To optimize processing with a large number of keys and/or gaps, this function:
        //
        // 1. converts the overlappingKeys array to an OrderedSet (if not already an OrderedSet)
        // 2. queries both the gaps set and the overlapping keys set in a loop, which allows:
        //    - skipping ranges of overlapping keys at once when there is no new overlapping gap
        //    - skipping ranges of gaps at once when there is no overlapping key
        //
        // This way, it is efficient when the number of non-overlapping gaps is large
        // (which is the most common case in practice).

        let overlappingKeysSet;
        if (Array.isArray(overlappingKeys)) {
            overlappingKeysSet = new OrderedSet(overlappingKeys);
        } else {
            overlappingKeysSet = overlappingKeys;
        }
        const firstKeyIt = overlappingKeysSet.begin();
        let currentKey = firstKeyIt.isAccessible() && firstKeyIt.pointer;
        let nRemoved = 0;
        while (currentKey) {
            const closestGapIt = this._gaps.reverseUpperBound(<GapSetEntry>{ firstKey: currentKey });
            if (closestGapIt.isAccessible()) {
                const closestGap = closestGapIt.pointer;
                if (currentKey <= closestGap.lastKey) {
                    // currentKey overlaps closestGap: remove the gap
                    this._gaps.eraseElementByIterator(closestGapIt);
                    nRemoved += 1;
                }
            }
            const nextGapIt = this._gaps.lowerBound(<GapSetEntry>{ firstKey: currentKey });
            if (!nextGapIt.isAccessible()) {
                // no more gap: we're done
                return nRemoved;
            }
            const nextGap = nextGapIt.pointer;
            // advance to the last key potentially overlapping with nextGap
            let currentKeyIt = overlappingKeysSet.reverseLowerBound(nextGap.lastKey);
            if (currentKeyIt.isAccessible()) {
                currentKey = currentKeyIt.pointer;
                if (currentKey >= nextGap.firstKey) {
                    // currentKey overlaps nextGap: remove the gap
                    this._gaps.eraseElementByIterator(nextGapIt);
                    nRemoved += 1;
                }
            }
            // advance to the first key potentially overlapping with another gap
            currentKeyIt = overlappingKeysSet.lowerBound(nextGap.lastKey);
            currentKey = currentKeyIt.isAccessible() && currentKeyIt.pointer;
        }
        return nRemoved;
    }
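
    // Illustration (not part of the diff): gap invalidation. Writing or deleting
    // a key that falls inside a cached gap must invalidate that gap, otherwise a
    // listing could skip over the new key. Assuming a gap { bar -> qux } is cached:
    //
    //   gaps.removeOverlappingGaps(['cat']);   // 'cat' falls inside bar -> qux:
    //                                          // that gap is removed, returns 1
    //   gaps.removeOverlappingGaps(['aaa']);   // overlaps no remaining gap, returns 0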

    /**
     * Internal helper to coalesce multiple chained gaps into a single gap.
     *
     * It is only used to construct lookupGap() return values and
     * doesn't modify the GapSet.
     *
     * NOTE: The function may take a noticeable amount of time and CPU
     * to execute if a large number of chained gaps have to be
     * coalesced, but it should never take more than a few seconds. In
     * most cases it should take less than a millisecond. It regularly
     * yields to the nodejs event loop to avoid blocking it during a
     * long execution.
     *
     * @param {GapSetEntry} firstGap - first gap of the chain to coalesce with
     * the next ones in the chain
     * @return {Promise<GapSetEntry>} - a new coalesced entry, as a Promise
     */
    _coalesceGapChain(firstGap: GapSetEntry): Promise<GapSetEntry> {
        return new Promise(resolve => {
            const coalescedGap: GapSetEntry = Object.assign({}, firstGap);
            const coalesceGapChainIteration = () => {
                // efficiency trade-off: 100 iterations of log(N) complexity lookups should
                // not block the event loop for too long
                for (let opCounter = 0; opCounter < 100; ++opCounter) {
                    const chainedGapIt = this._gaps.find(
                        <GapSetEntry>{ firstKey: coalescedGap.lastKey });
                    if (!chainedGapIt.isAccessible()) {
                        // chain is complete
                        return resolve(coalescedGap);
                    }
                    const chainedGap = chainedGapIt.pointer;
                    if (chainedGap.firstKey === chainedGap.lastKey) {
                        // found a single-key gap: chain is complete
                        return resolve(coalescedGap);
                    }
                    coalescedGap.lastKey = chainedGap.lastKey;
                    coalescedGap.weight += chainedGap.weight;
                }
                // yield to the event loop before continuing the process
                // of coalescing the gap chain
                return process.nextTick(coalesceGapChainIteration);
            };
            coalesceGapChainIteration();
        });
    }

    /**
     * Lookup the next gap that overlaps with [minKey, maxKey]. Internally chained
     * gaps are coalesced in the response into a single contiguous large gap.
     *
     * @param {string} minKey - minimum key overlapping with the returned gap
     * @param {string} [maxKey] - maximum key overlapping with the returned gap
     * @return {Promise<GapSetEntry | null>} - result of the lookup if a gap
     * was found, null otherwise, as a Promise
     */
    async lookupGap(minKey: string, maxKey?: string): Promise<GapSetEntry | null> {
        let firstGap: GapSetEntry | null = null;
        const minGapIt = this._gaps.reverseLowerBound(<GapSetEntry>{ firstKey: minKey });
        const minGap = minGapIt.isAccessible() && minGapIt.pointer;
        if (minGap && minGap.lastKey >= minKey) {
            firstGap = minGap;
        } else {
            const maxGapIt = this._gaps.upperBound(<GapSetEntry>{ firstKey: minKey });
            const maxGap = maxGapIt.isAccessible() && maxGapIt.pointer;
            if (maxGap && (maxKey === undefined || maxGap.firstKey <= maxKey)) {
                firstGap = maxGap;
            }
        }
        if (!firstGap) {
            return null;
        }
        return this._coalesceGapChain(firstGap);
    }
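
    // Illustration (not part of the diff): a lookup over the two chained gaps
    // from the setGap() sketch above, { bar -> qux, weight 80 } followed by
    // { qux -> zod, weight 50 }:
    //
    //   const gap = await gaps.lookupGap('cat', 'dog');
    //   // chained gaps are coalesced in the returned entry:
    //   // gap === { firstKey: 'bar', lastKey: 'zod', weight: 130 }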

    /**
     * Get the maximum weight setting for individual gaps.
     *
     * @return {number} - maximum weight of individual gaps
     */
    get maxWeight(): number {
        return this._maxWeight;
    }

    /**
     * Set the maximum weight setting for individual gaps.
     *
     * @param {number} gapWeight - maximum weight of individual gaps
     */
    set maxWeight(gapWeight: number) {
        this._maxWeight = gapWeight;
    }

    /**
     * Get the number of gaps stored in this set.
     *
     * @return {number} - number of gaps stored in this set
     */
    get size(): number {
        return this._gaps.size();
    }

    /**
     * Iterate over each gap of the set, ordered by first key
     *
     * @return {Iterator<GapSetEntry>} - an iterator over all gaps
     * Example:
     * for (const gap of myGapSet) { ... }
     */
    [Symbol.iterator](): Iterator<GapSetEntry> {
        return this._gaps[Symbol.iterator]();
    }

    /**
     * Return an array containing all gaps, ordered by first key
     *
     * NOTE: there is a toArray() method in the OrderedSet implementation
     * but it does not scale well and overflows the stack quickly. This is
     * why we provide an implementation based on an iterator.
     *
     * @return {GapSetEntry[]} - an array containing all gaps
     */
    toArray(): GapSetEntry[] {
        return [...this];
    }
}

@@ -0,0 +1,124 @@
export enum HeapOrder {
    Min = -1,
    Max = 1,
}

export enum CompareResult {
    LT = -1,
    EQ = 0,
    GT = 1,
}

export type CompareFunction = (x: any, y: any) => CompareResult;

export class Heap {
    size: number;
    _maxSize: number;
    _order: HeapOrder;
    _heap: any[];
    _cmpFn: CompareFunction;

    constructor(size: number, order: HeapOrder, cmpFn: CompareFunction) {
        this.size = 0;
        this._maxSize = size;
        this._order = order;
        this._cmpFn = cmpFn;
        this._heap = new Array<any>(this._maxSize);
    }

    _parent(i: number): number {
        return Math.floor((i - 1) / 2);
    }

    _left(i: number): number {
        return Math.floor((2 * i) + 1);
    }

    _right(i: number): number {
        return Math.floor((2 * i) + 2);
    }

    _shouldSwap(childIdx: number, parentIdx: number): boolean {
        return this._cmpFn(this._heap[childIdx], this._heap[parentIdx]) as number === this._order as number;
    }

    _swap(i: number, j: number) {
        const tmp = this._heap[i];
        this._heap[i] = this._heap[j];
        this._heap[j] = tmp;
    }

    _heapify(i: number) {
        const l = this._left(i);
        const r = this._right(i);
        let c = i;

        if (l < this.size && this._shouldSwap(l, c)) {
            c = l;
        }

        if (r < this.size && this._shouldSwap(r, c)) {
            c = r;
        }

        if (c != i) {
            this._swap(c, i);
            this._heapify(c);
        }
    }

    add(item: any): any {
        if (this.size >= this._maxSize) {
            return new Error('Max heap size reached');
        }

        ++this.size;
        let c = this.size - 1;
        this._heap[c] = item;

        while (c > 0) {
            if (!this._shouldSwap(c, this._parent(c))) {
                return null;
            }

            this._swap(c, this._parent(c));
            c = this._parent(c);
        }

        return null;
    };

    remove(): any {
        if (this.size <= 0) {
            return null;
        }

        const ret = this._heap[0];
        this._heap[0] = this._heap[this.size - 1];
        this._heapify(0);
        --this.size;

        return ret;
    };

    peek(): any {
        if (this.size <= 0) {
            return null;
        }

        return this._heap[0];
    };
}

export class MinHeap extends Heap {
    constructor(size: number, cmpFn: CompareFunction) {
        super(size, HeapOrder.Min, cmpFn);
    }
}

export class MaxHeap extends Heap {
    constructor(size: number, cmpFn: CompareFunction) {
        super(size, HeapOrder.Max, cmpFn);
    }
}
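
// Illustration (not part of the diff): minimal usage of the heap above. The
// comparator must return a CompareResult; with HeapOrder.Min, a child bubbles up
// whenever the comparator returns LT against its parent. Values are made up:
//
//   const cmp = (a: number, b: number): CompareResult =>
//       a < b ? CompareResult.LT : (a > b ? CompareResult.GT : CompareResult.EQ);
//   const h = new MinHeap(10, cmp);
//   h.add(5);
//   h.add(2);
//   h.add(7);
//   h.peek();     // 2: the smallest element sits at the root
//   h.remove();   // returns 2; peek() then returns 5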
@@ -1,6 +1,6 @@
 'use strict'; // eslint-disable-line strict

-const { FILTER_SKIP, SKIP_NONE } = require('./tools');
+const { FILTER_ACCEPT, SKIP_NONE } = require('./tools');

 // Use a heuristic to amortize the cost of JSON
 // serialization/deserialization only on largest metadata where the
@@ -92,21 +92,26 @@ class Extension {
      * @param {object} entry - a listing entry from metadata
      *        expected format: { key, value }
      * @return {number} - result of filtering the entry:
-     *        > 0: entry is accepted and included in the result
-     *        = 0: entry is accepted but not included (skipping)
-     *        < 0: entry is not accepted, listing should finish
+     *        FILTER_ACCEPT: entry is accepted and may or may not be included
+     *        in the result
+     *        FILTER_SKIP: listing may skip directly (with "gte" param) to
+     *        the key returned by the skipping() method
+     *        FILTER_END: the results are complete, listing can be stopped
      */
-    filter(entry) {
-        return entry ? FILTER_SKIP : FILTER_SKIP;
+    filter(/* entry: { key, value } */) {
+        return FILTER_ACCEPT;
     }

     /**
-     * Provides the insight into why filter is skipping an entry. This could be
-     * because it is skipping a range of delimited keys or a range of specific
-     * version when doing master version listing.
+     * Provides the next key at which the listing task is allowed to skip to.
+     * This could allow skipping over:
+     *   - a key prefix ending with the delimiter
+     *   - all remaining versions of an object when doing a current
+     *     versions listing in v0 format
+     *   - a cached "gap" of deleted objects when doing a current
+     *     versions listing in v0 format
      *
-     * @return {string} - the insight: a common prefix or a master key,
-     *         or SKIP_NONE if there is no insight
+     * @return {string} - the next key at which the listing task is allowed to skip to
      */
     skipping() {
         return SKIP_NONE;
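
// Illustration (not part of the diff): a minimal extension honoring the
// filter()/skipping() contract described above. The class name and prefix are
// made up; inc() is the helper from './tools' used elsewhere in this diff to
// compute the key right after a prefix:
//
//   class SkipInternalPrefix extends Extension {
//       filter(entry) {
//           if (entry.key.startsWith('internal/')) {
//               return FILTER_SKIP;       // ask the listing to jump ahead...
//           }
//           return FILTER_ACCEPT;         // otherwise keep the entry
//       }
//       skipping() {
//           return inc('internal/');      // ...to the first key past the prefix
//       }
//   }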
@@ -1,7 +1,7 @@
 'use strict'; // eslint-disable-line strict

 const { inc, checkLimit, listingParamsMasterKeysV0ToV1,
-    FILTER_END, FILTER_ACCEPT } = require('./tools');
+    FILTER_END, FILTER_ACCEPT, SKIP_NONE } = require('./tools');
 const DEFAULT_MAX_KEYS = 1000;
 const VSConst = require('../../versioning/constants').VersioningConstants;
 const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
@@ -163,7 +163,7 @@ class MultipartUploads {
     }

     skipping() {
-        return '';
+        return SKIP_NONE;
     }

     /**
@@ -2,7 +2,7 @@

 const Extension = require('./Extension').default;

-const { checkLimit, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
+const { checkLimit, FILTER_END, FILTER_ACCEPT } = require('./tools');
 const DEFAULT_MAX_KEYS = 10000;

 /**
@@ -91,7 +91,7 @@ class List extends Extension {
      *        < 0 : listing done
      */
     filter(elem) {
-        // Check first in case of maxkeys <= 0
+        // Check if the result array is full
         if (this.keys >= this.maxKeys) {
             return FILTER_END;
         }
@@ -99,7 +99,7 @@ class List extends Extension {
             this.filterKeyStartsWith !== undefined) &&
             typeof elem === 'object' &&
             !this.customFilter(elem.value)) {
-            return FILTER_SKIP;
+            return FILTER_ACCEPT;
         }
         if (typeof elem === 'object') {
             this.res.push({
@@ -1,274 +0,0 @@
|
||||||
'use strict'; // eslint-disable-line strict
|
|
||||||
|
|
||||||
const Extension = require('./Extension').default;
|
|
||||||
const { inc, listingParamsMasterKeysV0ToV1,
|
|
||||||
FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
|
|
||||||
const VSConst = require('../../versioning/constants').VersioningConstants;
|
|
||||||
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Find the common prefix in the path
|
|
||||||
*
|
|
||||||
* @param {String} key - path of the object
|
|
||||||
* @param {String} delimiter - separator
|
|
||||||
* @param {Number} delimiterIndex - 'folder' index in the path
|
|
||||||
* @return {String} - CommonPrefix
|
|
||||||
*/
|
|
||||||
function getCommonPrefix(key, delimiter, delimiterIndex) {
|
|
||||||
return key.substring(0, delimiterIndex + delimiter.length);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handle object listing with parameters
|
|
||||||
*
|
|
||||||
* @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
|
|
||||||
* @prop {String[]} Contents - 'files' to list
|
|
||||||
* @prop {Boolean} IsTruncated - truncated listing flag
|
|
||||||
* @prop {String|undefined} NextMarker - marker per amazon format
|
|
||||||
* @prop {Number} keys - count of listed keys
|
|
||||||
* @prop {String|undefined} delimiter - separator per amazon format
|
|
||||||
* @prop {String|undefined} prefix - prefix per amazon format
|
|
||||||
* @prop {Number} maxKeys - number of keys to list
|
|
||||||
*/
|
|
||||||
class Delimiter extends Extension {
|
|
||||||
/**
|
|
||||||
* Create a new Delimiter instance
|
|
||||||
* @constructor
|
|
||||||
* @param {Object} parameters - listing parameters
|
|
||||||
* @param {String} [parameters.delimiter] - delimiter per amazon
|
|
||||||
* format
|
|
||||||
* @param {String} [parameters.prefix] - prefix per amazon
|
|
||||||
* format
|
|
||||||
* @param {String} [parameters.marker] - marker per amazon
|
|
||||||
* format
|
|
||||||
* @param {Number} [parameters.maxKeys] - number of keys to list
|
|
||||||
* @param {Boolean} [parameters.v2] - indicates whether v2
|
|
||||||
* format
|
|
||||||
* @param {String} [parameters.startAfter] - marker per amazon
|
|
||||||
* format
|
|
||||||
* @param {String} [parameters.continuationToken] - obfuscated amazon
|
|
||||||
* token
|
|
||||||
* @param {Boolean} [parameters.alphabeticalOrder] - Either the result is
|
|
||||||
* alphabetically ordered
|
|
||||||
* or not
|
|
||||||
* @param {RequestLogger} logger - The logger of the
|
|
||||||
* request
|
|
||||||
* @param {String} [vFormat] - versioning key format
|
|
||||||
*/
|
|
||||||
constructor(parameters, logger, vFormat) {
|
|
||||||
super(parameters, logger);
|
|
||||||
// original listing parameters
|
|
||||||
this.delimiter = parameters.delimiter;
|
|
||||||
this.prefix = parameters.prefix;
|
|
||||||
this.marker = parameters.marker;
|
|
||||||
this.maxKeys = parameters.maxKeys || 1000;
|
|
||||||
this.startAfter = parameters.startAfter;
|
|
||||||
this.continuationToken = parameters.continuationToken;
|
|
||||||
this.alphabeticalOrder =
|
|
||||||
typeof parameters.alphabeticalOrder !== 'undefined' ?
|
|
||||||
parameters.alphabeticalOrder : true;
|
|
||||||
|
|
||||||
this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
|
|
||||||
// results
|
|
||||||
this.CommonPrefixes = [];
|
|
||||||
this.Contents = [];
|
|
||||||
this.IsTruncated = false;
|
|
||||||
this.NextMarker = parameters.marker;
|
|
||||||
this.NextContinuationToken =
|
|
||||||
parameters.continuationToken || parameters.startAfter;
|
|
||||||
|
|
||||||
this.startMarker = parameters.v2 ? 'startAfter' : 'marker';
|
|
||||||
this.continueMarker = parameters.v2 ? 'continuationToken' : 'marker';
|
|
||||||
this.nextContinueMarker = parameters.v2 ?
|
|
||||||
'NextContinuationToken' : 'NextMarker';
|
|
||||||
|
|
||||||
if (this.delimiter !== undefined &&
|
|
||||||
this[this.nextContinueMarker] !== undefined &&
|
|
||||||
this[this.nextContinueMarker].startsWith(this.prefix || '')) {
|
|
||||||
const nextDelimiterIndex =
|
|
||||||
this[this.nextContinueMarker].indexOf(this.delimiter,
|
|
||||||
this.prefix ? this.prefix.length : 0);
|
|
||||||
this[this.nextContinueMarker] =
|
|
||||||
this[this.nextContinueMarker].slice(0, nextDelimiterIndex +
|
|
||||||
this.delimiter.length);
|
|
||||||
}
|
|
||||||
|
|
||||||
Object.assign(this, {
|
|
||||||
[BucketVersioningKeyFormat.v0]: {
|
|
||||||
genMDParams: this.genMDParamsV0,
|
|
||||||
getObjectKey: this.getObjectKeyV0,
|
|
||||||
skipping: this.skippingV0,
|
|
||||||
},
|
|
||||||
[BucketVersioningKeyFormat.v1]: {
|
|
||||||
genMDParams: this.genMDParamsV1,
|
|
||||||
getObjectKey: this.getObjectKeyV1,
|
|
||||||
skipping: this.skippingV1,
|
|
||||||
},
|
|
||||||
}[this.vFormat]);
|
|
||||||
}
|
|
||||||
|
|
||||||
genMDParamsV0() {
|
|
||||||
const params = {};
|
|
||||||
if (this.prefix) {
|
|
||||||
params.gte = this.prefix;
|
|
||||||
params.lt = inc(this.prefix);
|
|
||||||
}
|
|
||||||
const startVal = this[this.continueMarker] || this[this.startMarker];
|
|
||||||
if (startVal) {
|
|
||||||
if (params.gte && params.gte > startVal) {
|
|
||||||
return params;
|
|
||||||
}
|
|
||||||
delete params.gte;
|
|
||||||
params.gt = startVal;
|
|
||||||
}
|
|
||||||
return params;
|
|
||||||
}
|
|
||||||
|
|
||||||
genMDParamsV1() {
|
|
||||||
const params = this.genMDParamsV0();
|
|
||||||
return listingParamsMasterKeysV0ToV1(params);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* check if the max keys count has been reached and set the
|
|
||||||
* final state of the result if it is the case
|
|
||||||
* @return {Boolean} - indicates if the iteration has to stop
|
|
||||||
*/
|
|
||||||
_reachedMaxKeys() {
|
|
||||||
if (this.keys >= this.maxKeys) {
|
|
||||||
// In cases of maxKeys <= 0 -> IsTruncated = false
|
|
||||||
this.IsTruncated = this.maxKeys > 0;
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Add a (key, value) tuple to the listing
|
|
||||||
* Set the NextMarker to the current key
|
|
||||||
* Increment the keys counter
|
|
||||||
* @param {String} key - The key to add
|
|
||||||
* @param {String} value - The value of the key
|
|
||||||
* @return {number} - indicates if iteration should continue
|
|
||||||
*/
|
|
||||||
addContents(key, value) {
|
|
||||||
if (this._reachedMaxKeys()) {
|
|
||||||
return FILTER_END;
|
|
||||||
}
|
|
||||||
this.Contents.push({ key, value: this.trimMetadata(value) });
|
|
||||||
this[this.nextContinueMarker] = key;
|
|
||||||
++this.keys;
|
|
||||||
return FILTER_ACCEPT;
|
|
||||||
}
|
|
||||||
|
|
||||||
getObjectKeyV0(obj) {
|
|
||||||
return obj.key;
|
|
||||||
}
|
|
||||||
|
|
||||||
getObjectKeyV1(obj) {
|
|
||||||
return obj.key.slice(DbPrefixes.Master.length);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Filter to apply on each iteration, based on:
|
|
||||||
* - prefix
|
|
||||||
* - delimiter
|
|
||||||
* - maxKeys
|
|
||||||
* The marker is being handled directly by levelDB
|
|
||||||
* @param {Object} obj - The key and value of the element
|
|
||||||
* @param {String} obj.key - The key of the element
|
|
||||||
* @param {String} obj.value - The value of the element
|
|
||||||
* @return {number} - indicates if iteration should continue
|
|
||||||
*/
|
|
||||||
filter(obj) {
|
|
||||||
const key = this.getObjectKey(obj);
|
|
||||||
const value = obj.value;
|
|
||||||
if ((this.prefix && !key.startsWith(this.prefix))
|
|
||||||
|| (this.alphabeticalOrder
|
|
||||||
&& typeof this[this.nextContinueMarker] === 'string'
|
|
||||||
&& key <= this[this.nextContinueMarker])) {
|
|
||||||
return FILTER_SKIP;
|
|
||||||
}
|
|
||||||
if (this.delimiter) {
|
|
||||||
const baseIndex = this.prefix ? this.prefix.length : 0;
|
|
||||||
const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
|
|
||||||
if (delimiterIndex === -1) {
|
|
||||||
return this.addContents(key, value);
|
|
||||||
}
|
|
||||||
return this.addCommonPrefix(key, delimiterIndex);
|
|
||||||
}
|
|
||||||
return this.addContents(key, value);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Add a Common Prefix in the list
|
|
||||||
* @param {String} key - object name
|
|
||||||
* @param {Number} index - after prefix starting point
|
|
||||||
* @return {Boolean} - indicates if iteration should continue
|
|
||||||
*/
|
|
||||||
addCommonPrefix(key, index) {
|
|
||||||
const commonPrefix = getCommonPrefix(key, this.delimiter, index);
|
|
||||||
if (this.CommonPrefixes.indexOf(commonPrefix) === -1
|
|
||||||
&& this[this.nextContinueMarker] !== commonPrefix) {
|
|
||||||
if (this._reachedMaxKeys()) {
|
|
||||||
return FILTER_END;
|
|
||||||
}
|
|
||||||
this.CommonPrefixes.push(commonPrefix);
|
|
||||||
this[this.nextContinueMarker] = commonPrefix;
|
|
||||||
++this.keys;
|
|
||||||
return FILTER_ACCEPT;
|
|
||||||
}
|
|
||||||
return FILTER_SKIP;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* If repd happens to want to skip listing on a bucket in v0
|
|
||||||
* versioning key format, here is an idea.
|
|
||||||
*
|
|
||||||
* @return {string} - the present range (NextMarker) if repd believes
|
|
||||||
* that it's enough and should move on
|
|
||||||
*/
|
|
||||||
skippingV0() {
|
|
||||||
return this[this.nextContinueMarker];
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* If repd happens to want to skip listing on a bucket in v1
|
|
||||||
* versioning key format, here is an idea.
|
|
||||||
*
|
|
||||||
* @return {string} - the present range (NextMarker) if repd believes
|
|
||||||
* that it's enough and should move on
|
|
||||||
*/
|
|
||||||
skippingV1() {
|
|
||||||
return DbPrefixes.Master + this[this.nextContinueMarker];
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Return an object containing all mandatory fields to use once the
|
|
||||||
* iteration is done, doesn't show a NextMarker field if the output
|
|
||||||
* isn't truncated
|
|
||||||
* @return {Object} - following amazon format
|
|
||||||
*/
|
|
||||||
result() {
|
|
||||||
/* NextMarker is only provided when delimiter is used.
|
|
||||||
* specified in v1 listing documentation
|
|
||||||
* http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
|
|
||||||
*/
|
|
||||||
const result = {
|
|
||||||
CommonPrefixes: this.CommonPrefixes,
|
|
||||||
Contents: this.Contents,
|
|
||||||
IsTruncated: this.IsTruncated,
|
|
||||||
Delimiter: this.delimiter,
|
|
||||||
};
|
|
||||||
if (this.parameters.v2) {
|
|
||||||
result.NextContinuationToken = this.IsTruncated
|
|
||||||
? this.NextContinuationToken : undefined;
|
|
||||||
} else {
|
|
||||||
result.NextMarker = (this.IsTruncated && this.delimiter)
|
|
||||||
? this.NextMarker : undefined;
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = { Delimiter };
|
|
|
@@ -0,0 +1,356 @@
|
||||||
|
'use strict'; // eslint-disable-line strict
|
||||||
|
|
||||||
|
const Extension = require('./Extension').default;
|
||||||
|
const { inc, listingParamsMasterKeysV0ToV1,
|
||||||
|
FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools');
|
||||||
|
const VSConst = require('../../versioning/constants').VersioningConstants;
|
||||||
|
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
|
||||||
|
|
||||||
|
export interface FilterState {
|
||||||
|
id: number,
|
||||||
|
};
|
||||||
|
|
||||||
|
export interface FilterReturnValue {
|
||||||
|
FILTER_ACCEPT,
|
||||||
|
FILTER_SKIP,
|
||||||
|
FILTER_END,
|
||||||
|
};
|
||||||
|
|
||||||
|
export const enum DelimiterFilterStateId {
|
||||||
|
NotSkipping = 1,
|
||||||
|
SkippingPrefix = 2,
|
||||||
|
};
|
||||||
|
|
||||||
|
export interface DelimiterFilterState_NotSkipping extends FilterState {
|
||||||
|
id: DelimiterFilterStateId.NotSkipping,
|
||||||
|
};
|
||||||
|
|
||||||
|
export interface DelimiterFilterState_SkippingPrefix extends FilterState {
|
||||||
|
id: DelimiterFilterStateId.SkippingPrefix,
|
||||||
|
prefix: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
type KeyHandler = (key: string, value: string) => FilterReturnValue;
|
||||||
|
|
||||||
|
export type ResultObject = {
|
||||||
|
CommonPrefixes: string[];
|
||||||
|
Contents: {
|
||||||
|
key: string;
|
||||||
|
value: string;
|
||||||
|
}[];
|
||||||
|
IsTruncated: boolean;
|
||||||
|
Delimiter ?: string;
|
||||||
|
NextMarker ?: string;
|
||||||
|
NextContinuationToken ?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle object listing with parameters
|
||||||
|
*
|
||||||
|
* @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
|
||||||
|
* @prop {String[]} Contents - 'files' to list
|
||||||
|
* @prop {Boolean} IsTruncated - truncated listing flag
|
||||||
|
* @prop {String|undefined} NextMarker - marker per amazon format
|
||||||
|
* @prop {Number} keys - count of listed keys
|
||||||
|
* @prop {String|undefined} delimiter - separator per amazon format
|
||||||
|
* @prop {String|undefined} prefix - prefix per amazon format
|
||||||
|
* @prop {Number} maxKeys - number of keys to list
|
||||||
|
*/
|
||||||
|
export class Delimiter extends Extension {
|
||||||
|
|
||||||
|
state: FilterState;
|
||||||
|
keyHandlers: { [id: number]: KeyHandler };
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a new Delimiter instance
|
||||||
|
* @constructor
|
||||||
|
* @param {Object} parameters - listing parameters
|
||||||
|
* @param {String} [parameters.delimiter] - delimiter per amazon
|
||||||
|
* format
|
||||||
|
* @param {String} [parameters.prefix] - prefix per amazon
|
||||||
|
* format
|
||||||
|
* @param {String} [parameters.marker] - marker per amazon
|
||||||
|
* format
|
||||||
|
* @param {Number} [parameters.maxKeys] - number of keys to list
|
||||||
|
* @param {Boolean} [parameters.v2] - indicates whether v2
|
||||||
|
* format
|
||||||
|
* @param {String} [parameters.startAfter] - marker per amazon
|
||||||
|
* format
|
||||||
|
* @param {String} [parameters.continuationToken] - obfuscated amazon
|
||||||
|
* token
|
||||||
|
* @param {RequestLogger} logger - The logger of the
|
||||||
|
* request
|
||||||
|
* @param {String} [vFormat] - versioning key format
|
||||||
|
*/
|
||||||
|
constructor(parameters, logger, vFormat) {
|
||||||
|
super(parameters, logger);
|
||||||
|
// original listing parameters
|
||||||
|
this.delimiter = parameters.delimiter;
|
||||||
|
this.prefix = parameters.prefix;
|
||||||
|
this.maxKeys = parameters.maxKeys || 1000;
|
||||||
|
|
||||||
|
if (parameters.v2) {
|
||||||
|
this.marker = parameters.continuationToken || parameters.startAfter;
|
||||||
|
} else {
|
||||||
|
this.marker = parameters.marker;
|
||||||
|
}
|
||||||
|
this.nextMarker = this.marker;
|
||||||
|
|
||||||
|
this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
|
||||||
|
// results
|
||||||
|
this.CommonPrefixes = [];
|
||||||
|
this.Contents = [];
|
||||||
|
this.IsTruncated = false;
|
||||||
|
this.keyHandlers = {};
|
||||||
|
|
||||||
|
Object.assign(this, {
|
||||||
|
[BucketVersioningKeyFormat.v0]: {
|
||||||
|
genMDParams: this.genMDParamsV0,
|
||||||
|
getObjectKey: this.getObjectKeyV0,
|
||||||
|
skipping: this.skippingV0,
|
||||||
|
},
|
||||||
|
[BucketVersioningKeyFormat.v1]: {
|
||||||
|
genMDParams: this.genMDParamsV1,
|
||||||
|
getObjectKey: this.getObjectKeyV1,
|
||||||
|
skipping: this.skippingV1,
|
||||||
|
},
|
||||||
|
}[this.vFormat]);
|
||||||
|
|
||||||
|
// if there is a delimiter, we may skip ranges by prefix,
|
||||||
|
// hence using the NotSkippingPrefix flavor that checks the
|
||||||
|
// subprefix up to the delimiter for the NotSkipping state
|
||||||
|
if (this.delimiter) {
|
||||||
|
this.setKeyHandler(
|
||||||
|
DelimiterFilterStateId.NotSkipping,
|
||||||
|
this.keyHandler_NotSkippingPrefix.bind(this));
|
||||||
|
} else {
|
||||||
|
// listing without a delimiter never has to skip over any
|
||||||
|
// prefix -> use NeverSkipping flavor for the NotSkipping
|
||||||
|
// state
|
||||||
|
this.setKeyHandler(
|
||||||
|
DelimiterFilterStateId.NotSkipping,
|
||||||
|
this.keyHandler_NeverSkipping.bind(this));
|
||||||
|
}
|
||||||
|
this.setKeyHandler(
|
||||||
|
DelimiterFilterStateId.SkippingPrefix,
|
||||||
|
this.keyHandler_SkippingPrefix.bind(this));
|
||||||
|
|
||||||
|
this.state = <DelimiterFilterState_NotSkipping> {
|
||||||
|
id: DelimiterFilterStateId.NotSkipping,
|
||||||
|
};
|
||||||
|
}
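
// Illustration (not part of the diff): how handleKey() drives the state machine
// set up above, with a '/' delimiter and made-up keys fed in sorted order:
//
//   handleKey('doc/a', ...)  // NotSkipping: emits CommonPrefix 'doc/' and moves to
//                            // SkippingPrefix { prefix: 'doc/' }
//   handleKey('doc/b', ...)  // SkippingPrefix: key still starts with 'doc/' -> FILTER_SKIP
//   handleKey('img/a', ...)  // prefix no longer matches: back to NotSkipping, and the
//                            // key is re-handled (emits CommonPrefix 'img/')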
|
||||||
|
|
||||||
|
genMDParamsV0() {
|
||||||
|
const params: { gt ?: string, gte ?: string, lt ?: string } = {};
|
||||||
|
if (this.prefix) {
|
||||||
|
params.gte = this.prefix;
|
||||||
|
params.lt = inc(this.prefix);
|
||||||
|
}
|
||||||
|
if (this.marker && this.delimiter) {
|
||||||
|
const commonPrefix = this.getCommonPrefix(this.marker);
|
||||||
|
if (commonPrefix) {
|
||||||
|
const afterPrefix = inc(commonPrefix);
|
||||||
|
if (!params.gte || afterPrefix > params.gte) {
|
||||||
|
params.gte = afterPrefix;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (this.marker && (!params.gte || this.marker >= params.gte)) {
|
||||||
|
delete params.gte;
|
||||||
|
params.gt = this.marker;
|
||||||
|
}
|
||||||
|
return params;
|
||||||
|
}
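
// Illustration (not part of the diff): the metadata query range produced above,
// with made-up values. With prefix 'foo/', delimiter '/' and a marker
// 'foo/bar/baz' left by a previous listing page, the common prefix 'foo/bar/'
// has already been returned, so the scan can restart after it (assuming inc()
// bumps the trailing character, as used for the prefix upper bound):
//
//   genMDParamsV0() === { gte: inc('foo/bar/'), lt: inc('foo/') }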
|
||||||
|
|
||||||
|
genMDParamsV1() {
|
||||||
|
const params = this.genMDParamsV0();
|
||||||
|
return listingParamsMasterKeysV0ToV1(params);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* check if the max keys count has been reached and set the
|
||||||
|
* final state of the result if it is the case
|
||||||
|
* @return {Boolean} - indicates if the iteration has to stop
|
||||||
|
*/
|
||||||
|
_reachedMaxKeys(): boolean {
|
||||||
|
if (this.keys >= this.maxKeys) {
|
||||||
|
// In cases of maxKeys <= 0 -> IsTruncated = false
|
||||||
|
this.IsTruncated = this.maxKeys > 0;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add a (key, value) tuple to the listing
|
||||||
|
* Set the NextMarker to the current key
|
||||||
|
* Increment the keys counter
|
||||||
|
* @param {String} key - The key to add
|
||||||
|
* @param {String} value - The value of the key
|
||||||
|
* @return {number} - indicates if iteration should continue
|
||||||
|
*/
|
||||||
|
addContents(key: string, value: string): void {
|
||||||
|
this.Contents.push({ key, value: this.trimMetadata(value) });
|
||||||
|
++this.keys;
|
||||||
|
this.nextMarker = key;
|
||||||
|
}
|
||||||
|
|
||||||
|
getCommonPrefix(key: string): string | undefined {
|
||||||
|
if (!this.delimiter) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
const baseIndex = this.prefix ? this.prefix.length : 0;
|
||||||
|
const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
|
||||||
|
if (delimiterIndex === -1) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
return key.substring(0, delimiterIndex + this.delimiter.length);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add a Common Prefix in the list
|
||||||
|
* @param {String} commonPrefix - common prefix to add
|
||||||
|
* @param {String} key - full key starting with commonPrefix
|
||||||
|
* @return {Boolean} - indicates if iteration should continue
|
||||||
|
*/
|
||||||
|
addCommonPrefix(commonPrefix: string, key: string): void {
|
||||||
|
// add the new prefix to the list
|
||||||
|
this.CommonPrefixes.push(commonPrefix);
|
||||||
|
++this.keys;
|
||||||
|
this.nextMarker = commonPrefix;
|
||||||
|
}
|
||||||
|
|
||||||
|
addCommonPrefixOrContents(key: string, value: string): string | undefined {
|
||||||
|
// add the subprefix to the common prefixes if the key has the delimiter
|
||||||
|
const commonPrefix = this.getCommonPrefix(key);
|
||||||
|
if (commonPrefix) {
|
||||||
|
this.addCommonPrefix(commonPrefix, key);
|
||||||
|
return commonPrefix;
|
||||||
|
}
|
||||||
|
this.addContents(key, value);
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
getObjectKeyV0(obj: { key: string }): string {
|
||||||
|
return obj.key;
|
||||||
|
}
|
||||||
|
|
||||||
|
getObjectKeyV1(obj: { key: string }): string {
|
||||||
|
return obj.key.slice(DbPrefixes.Master.length);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Filter to apply on each iteration, based on:
|
||||||
|
* - prefix
|
||||||
|
* - delimiter
|
||||||
|
* - maxKeys
|
||||||
|
* The marker is being handled directly by levelDB
|
||||||
|
* @param {Object} obj - The key and value of the element
|
||||||
|
* @param {String} obj.key - The key of the element
|
||||||
|
* @param {String} obj.value - The value of the element
|
||||||
|
* @return {number} - indicates if iteration should continue
|
||||||
|
*/
|
||||||
|
filter(obj: { key: string, value: string }): FilterReturnValue {
|
||||||
|
const key = this.getObjectKey(obj);
|
||||||
|
const value = obj.value;
|
||||||
|
|
||||||
|
return this.handleKey(key, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
setState(state: FilterState): void {
|
||||||
|
this.state = state;
|
||||||
|
}
|
||||||
|
|
||||||
|
setKeyHandler(stateId: number, keyHandler: KeyHandler): void {
|
||||||
|
this.keyHandlers[stateId] = keyHandler;
|
||||||
|
}
|
||||||
|
|
||||||
|
handleKey(key: string, value: string): FilterReturnValue {
|
||||||
|
return this.keyHandlers[this.state.id](key, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
keyHandler_NeverSkipping(key: string, value: string): FilterReturnValue {
|
||||||
|
if (this._reachedMaxKeys()) {
|
||||||
|
return FILTER_END;
|
||||||
|
}
|
||||||
|
this.addContents(key, value);
|
||||||
|
return FILTER_ACCEPT;
|
||||||
|
}
|
||||||
|
|
||||||
|
keyHandler_NotSkippingPrefix(key: string, value: string): FilterReturnValue {
|
||||||
|
if (this._reachedMaxKeys()) {
|
||||||
|
return FILTER_END;
|
||||||
|
}
|
||||||
|
const commonPrefix = this.addCommonPrefixOrContents(key, value);
|
||||||
|
if (commonPrefix) {
|
||||||
|
// transition into SkippingPrefix state to skip all following keys
|
||||||
|
// while they start with the same prefix
|
||||||
|
this.setState(<DelimiterFilterState_SkippingPrefix> {
|
||||||
|
id: DelimiterFilterStateId.SkippingPrefix,
|
||||||
|
prefix: commonPrefix,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return FILTER_ACCEPT;
|
||||||
|
}
|
||||||
|
|
||||||
|
keyHandler_SkippingPrefix(key: string, value: string): FilterReturnValue {
|
||||||
|
const { prefix } = <DelimiterFilterState_SkippingPrefix> this.state;
|
||||||
|
if (key.startsWith(prefix)) {
|
||||||
|
return FILTER_SKIP;
|
||||||
|
}
|
||||||
|
this.setState(<DelimiterFilterState_NotSkipping> {
|
||||||
|
id: DelimiterFilterStateId.NotSkipping,
|
||||||
|
});
|
||||||
|
return this.handleKey(key, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
skippingBase(): string | undefined {
|
||||||
|
switch (this.state.id) {
|
||||||
|
case DelimiterFilterStateId.SkippingPrefix:
|
||||||
|
const { prefix } = <DelimiterFilterState_SkippingPrefix> this.state;
|
||||||
|
return inc(prefix);
|
||||||
|
|
||||||
|
default:
|
||||||
|
return SKIP_NONE;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
skippingV0() {
|
||||||
|
return this.skippingBase();
|
||||||
|
}
|
||||||
|
|
||||||
|
skippingV1() {
|
||||||
|
const skipTo = this.skippingBase();
|
||||||
|
if (skipTo === SKIP_NONE) {
|
||||||
|
return SKIP_NONE;
|
||||||
|
}
|
||||||
|
return DbPrefixes.Master + skipTo;
|
||||||
|
}
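
// Illustration (not part of the diff): the two skipping flavors, assuming the
// listing is currently in the SkippingPrefix state for the made-up common
// prefix 'photos/2024/':
//
//   skippingV0() === inc('photos/2024/')                      // raw v0 key space
//   skippingV1() === DbPrefixes.Master + inc('photos/2024/')  // master keys are prefixed in v1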
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return an object containing all mandatory fields to use once the
|
||||||
|
* iteration is done, doesn't show a NextMarker field if the output
|
||||||
|
* isn't truncated
|
||||||
|
* @return {Object} - following amazon format
|
||||||
|
*/
|
||||||
|
result(): ResultObject {
|
||||||
|
/* NextMarker is only provided when delimiter is used.
|
||||||
|
* specified in v1 listing documentation
|
||||||
|
* http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
|
||||||
|
*/
|
||||||
|
const result: ResultObject = {
|
||||||
|
CommonPrefixes: this.CommonPrefixes,
|
||||||
|
Contents: this.Contents,
|
||||||
|
IsTruncated: this.IsTruncated,
|
||||||
|
Delimiter: this.delimiter,
|
||||||
|
};
|
||||||
|
if (this.parameters.v2) {
|
||||||
|
result.NextContinuationToken = this.IsTruncated
|
||||||
|
? this.nextMarker : undefined;
|
||||||
|
} else {
|
||||||
|
result.NextMarker = (this.IsTruncated && this.delimiter)
|
||||||
|
? this.nextMarker : undefined;
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
}
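
// Illustration (not part of the diff): how a listing backend typically drives
// this class; parameter values are made up and the logger is assumed to exist:
//
//   const listing = new Delimiter(
//       { prefix: 'photos/', delimiter: '/', maxKeys: 1000 }, logger,
//       BucketVersioningKeyFormat.v0);
//   const params = listing.genMDParams();   // range query to run against metadata
//   // for each { key, value } entry returned by the scan:
//   //   FILTER_ACCEPT -> keep iterating, FILTER_SKIP -> restart at listing.skipping(),
//   //   FILTER_END    -> stop, maxKeys reached
//   const status = listing.filter({ key: 'photos/2024/a.jpg', value: '{}' });
//   const result = listing.result();        // { CommonPrefixes, Contents, IsTruncated, ... }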
|
|
@@ -0,0 +1,127 @@
|
||||||
|
const { DelimiterMaster } = require('./delimiterMaster');
|
||||||
|
const { FILTER_ACCEPT, FILTER_END } = require('./tools');
|
||||||
|
|
||||||
|
type ResultObject = {
|
||||||
|
Contents: {
|
||||||
|
key: string;
|
||||||
|
value: string;
|
||||||
|
}[];
|
||||||
|
IsTruncated: boolean;
|
||||||
|
NextMarker ?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle object listing with parameters. This extends the base class DelimiterMaster
|
||||||
|
* to return the master/current versions.
|
||||||
|
*/
|
||||||
|
class DelimiterCurrent extends DelimiterMaster {
|
||||||
|
/**
|
||||||
|
* Delimiter listing of current versions.
|
||||||
|
* @param {Object} parameters - listing parameters
|
||||||
|
* @param {String} parameters.beforeDate - limit the response to keys older than beforeDate
|
||||||
|
* @param {String} parameters.excludedDataStoreName - excluded datastore name
|
||||||
|
* @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
|
||||||
|
* @param {RequestLogger} logger - The logger of the request
|
||||||
|
* @param {String} [vFormat] - versioning key format
|
||||||
|
*/
|
||||||
|
constructor(parameters, logger, vFormat) {
|
||||||
|
super(parameters, logger, vFormat);
|
||||||
|
|
||||||
|
this.beforeDate = parameters.beforeDate;
|
||||||
|
this.excludedDataStoreName = parameters.excludedDataStoreName;
|
||||||
|
this.maxScannedLifecycleListingEntries = parameters.maxScannedLifecycleListingEntries;
|
||||||
|
this.scannedKeys = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
genMDParamsV0() {
|
||||||
|
const params = super.genMDParamsV0();
|
||||||
|
// lastModified and dataStoreName parameters are used by metadata that enables built-in filtering,
|
||||||
|
// a feature currently exclusive to MongoDB
|
||||||
|
if (this.beforeDate) {
|
||||||
|
params.lastModified = {
|
||||||
|
lt: this.beforeDate,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.excludedDataStoreName) {
|
||||||
|
params.dataStoreName = {
|
||||||
|
ne: this.excludedDataStoreName,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return params;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parses the stringified entry's value.
|
||||||
|
* @param s - stringified value
|
||||||
|
* @return - undefined if parsing fails, otherwise it contains the parsed value.
|
||||||
|
*/
|
||||||
|
_parse(s) {
|
||||||
|
let p;
|
||||||
|
try {
|
||||||
|
p = JSON.parse(s);
|
||||||
|
} catch (e: any) {
|
||||||
|
this.logger.warn(
|
||||||
|
'Could not parse Object Metadata while listing',
|
||||||
|
{ err: e.toString() });
|
||||||
|
}
|
||||||
|
return p;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* check if the max keys count has been reached and set the
|
||||||
|
* final state of the result if it is the case
|
||||||
|
*
|
||||||
|
* specialized implementation on DelimiterCurrent to also check
|
||||||
|
* the number of scanned keys
|
||||||
|
*
|
||||||
|
* @return {Boolean} - indicates if the iteration has to stop
|
||||||
|
*/
|
||||||
|
_reachedMaxKeys(): boolean {
|
||||||
|
if (this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries) {
|
||||||
|
this.IsTruncated = true;
|
||||||
|
this.logger.info('listing stopped due to reaching the maximum scanned entries limit',
|
||||||
|
{
|
||||||
|
maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
|
||||||
|
scannedKeys: this.scannedKeys,
|
||||||
|
});
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return super._reachedMaxKeys();
|
||||||
|
}
|
||||||
|
|
||||||
|
addContents(key, value) {
|
||||||
|
++this.scannedKeys;
|
||||||
|
const parsedValue = this._parse(value);
|
||||||
|
// if parsing fails, skip the key.
|
||||||
|
if (parsedValue) {
|
||||||
|
const lastModified = parsedValue['last-modified'];
|
||||||
|
const dataStoreName = parsedValue.dataStoreName;
|
||||||
|
// Keep the entry only if "beforeDate" is not set or the current version is older
// than it, and if "excludedDataStoreName" is not set or differs from the entry's
// data store name.
|
||||||
|
if ((!this.beforeDate || (lastModified && lastModified < this.beforeDate)) &&
|
||||||
|
(!this.excludedDataStoreName || dataStoreName !== this.excludedDataStoreName)) {
|
||||||
|
super.addContents(key, value);
|
||||||
|
}
|
||||||
|
// In the event of a timeout occurring before any content is added,
|
||||||
|
// NextMarker is updated even if the object is not eligible.
|
||||||
|
// It minimizes the amount of data that the client needs to re-process if the request times out.
|
||||||
|
this.nextMarker = key;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
result(): object {
|
||||||
|
const result: ResultObject = {
|
||||||
|
Contents: this.Contents,
|
||||||
|
IsTruncated: this.IsTruncated,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (this.IsTruncated) {
|
||||||
|
result.NextMarker = this.nextMarker;
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
module.exports = { DelimiterCurrent };
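
// Illustration (not part of the diff): a lifecycle-driven "current versions"
// listing using the class above; dates, names and limits are made up:
//
//   const listing = new DelimiterCurrent({
//       beforeDate: '2024-01-01T00:00:00.000Z',      // only keep versions older than this
//       excludedDataStoreName: 'cold-location',      // skip data already transitioned there
//       maxScannedLifecycleListingEntries: 10000,    // stop scanning after 10k entries
//       maxKeys: 1000,
//   }, logger, BucketVersioningKeyFormat.v0);
//   // genMDParamsV0() then adds `lastModified.lt` and `dataStoreName.ne` to the
//   // metadata query when the backend supports built-in filtering (MongoDB).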
|
|
@ -1,196 +0,0 @@
|
||||||
'use strict'; // eslint-disable-line strict
|
|
||||||
|
|
||||||
const Delimiter = require('./delimiter').Delimiter;
|
|
||||||
const Version = require('../../versioning/Version').Version;
|
|
||||||
const VSConst = require('../../versioning/constants').VersioningConstants;
|
|
||||||
const { BucketVersioningKeyFormat } = VSConst;
|
|
||||||
const { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools');
|
|
||||||
|
|
||||||
const VID_SEP = VSConst.VersionId.Separator;
|
|
||||||
const { DbPrefixes } = VSConst;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handle object listing with parameters. This extends the base class Delimiter
|
|
||||||
* to return the raw master versions of existing objects.
|
|
||||||
*/
|
|
||||||
class DelimiterMaster extends Delimiter {
|
|
||||||
/**
|
|
||||||
* Delimiter listing of master versions.
|
|
||||||
* @param {Object} parameters - listing parameters
|
|
||||||
* @param {String} parameters.delimiter - delimiter per amazon format
|
|
||||||
* @param {String} parameters.prefix - prefix per amazon format
|
|
||||||
* @param {String} parameters.marker - marker per amazon format
|
|
||||||
* @param {Number} parameters.maxKeys - number of keys to list
|
|
||||||
* @param {Boolean} parameters.v2 - indicates whether v2 format
|
|
||||||
* @param {String} parameters.startAfter - marker per amazon v2 format
|
|
||||||
* @param {String} parameters.continuationToken - obfuscated amazon token
|
|
||||||
* @param {RequestLogger} logger - The logger of the request
|
|
||||||
* @param {String} [vFormat] - versioning key format
|
|
||||||
*/
|
|
||||||
constructor(parameters, logger, vFormat) {
|
|
||||||
super(parameters, logger, vFormat);
|
|
||||||
// non-PHD master version or a version whose master is a PHD version
|
|
||||||
this.prvKey = undefined;
|
|
||||||
this.prvPHDKey = undefined;
|
|
||||||
this.inReplayPrefix = false;
|
|
||||||
|
|
||||||
Object.assign(this, {
|
|
||||||
[BucketVersioningKeyFormat.v0]: {
|
|
||||||
filter: this.filterV0,
|
|
||||||
skipping: this.skippingV0,
|
|
||||||
},
|
|
||||||
[BucketVersioningKeyFormat.v1]: {
|
|
||||||
filter: this.filterV1,
|
|
||||||
skipping: this.skippingV1,
|
|
||||||
},
|
|
||||||
}[this.vFormat]);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Filter to apply on each iteration for buckets in v0 format,
|
|
||||||
* based on:
|
|
||||||
* - prefix
|
|
||||||
* - delimiter
|
|
||||||
* - maxKeys
|
|
||||||
* The marker is being handled directly by levelDB
|
|
||||||
* @param {Object} obj - The key and value of the element
|
|
||||||
* @param {String} obj.key - The key of the element
|
|
||||||
* @param {String} obj.value - The value of the element
|
|
||||||
* @return {number} - indicates if iteration should continue
|
|
||||||
*/
|
|
||||||
filterV0(obj) {
|
|
||||||
let key = obj.key;
|
|
||||||
const value = obj.value;
|
|
||||||
|
|
||||||
if (key.startsWith(DbPrefixes.Replay)) {
|
|
||||||
this.inReplayPrefix = true;
|
|
||||||
return FILTER_SKIP;
|
|
||||||
}
|
|
||||||
this.inReplayPrefix = false;
|
|
||||||
|
|
||||||
/* Skip keys not starting with the prefix or not alphabetically
|
|
||||||
* ordered. */
|
|
||||||
if ((this.prefix && !key.startsWith(this.prefix))
|
|
||||||
|| (typeof this[this.nextContinueMarker] === 'string' &&
|
|
||||||
key <= this[this.nextContinueMarker])) {
|
|
||||||
return FILTER_SKIP;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Skip version keys (<key><versionIdSeparator><version>) if we already
|
|
||||||
* have a master version. */
|
|
||||||
const versionIdIndex = key.indexOf(VID_SEP);
|
|
||||||
if (versionIdIndex >= 0) {
|
|
||||||
key = key.slice(0, versionIdIndex);
|
|
||||||
/* - key === this.prvKey is triggered when a master version has
|
|
||||||
* been accepted for this key,
|
|
||||||
* - key === this.NextMarker or this.NextContinueToken is triggered
|
|
||||||
* when a listing page ends on an accepted obj and the next page
|
|
||||||
* starts with a version of this object.
|
|
||||||
* In that case prvKey is default set to undefined
|
|
||||||
* in the constructor and comparing to NextMarker is the only
|
|
||||||
* way to know we should not accept this version. This test is
|
|
||||||
* not redundant with the one at the beginning of this function,
|
|
||||||
* we are comparing here the key without the version suffix,
|
|
||||||
* - key startsWith the previous NextMarker happens because we set
|
|
||||||
* NextMarker to the common prefix instead of the whole key
|
|
||||||
* value. (TODO: remove this test once ZENKO-1048 is fixed)
|
|
||||||
* */
|
|
||||||
if (key === this.prvKey || key === this[this.nextContinueMarker] ||
|
|
||||||
(this.delimiter &&
|
|
||||||
key.startsWith(this[this.nextContinueMarker]))) {
|
|
||||||
/* master version already filtered */
|
|
||||||
return FILTER_SKIP;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (Version.isPHD(value)) {
|
|
||||||
/* master version is a PHD version, we want to wait for the next
|
|
||||||
* one:
|
|
||||||
* - Set the prvKey to undefined to not skip the next version,
|
|
||||||
* - return accept to avoid users to skip the next values in range
|
|
||||||
* (skip scan mechanism in metadata backend like Metadata or
|
|
||||||
* MongoClient). */
|
|
||||||
this.prvKey = undefined;
|
|
||||||
this.prvPHDKey = key;
|
|
||||||
return FILTER_ACCEPT;
|
|
||||||
}
|
|
||||||
if (Version.isDeleteMarker(value)) {
|
|
||||||
/* This entry is a deleteMarker which has not been filtered by the
|
|
||||||
* version test. Either :
|
|
||||||
* - it is a deleteMarker on the master version, we want to SKIP
|
|
||||||
* all the following entries with this key (no master version),
|
|
||||||
* - or a deleteMarker following a PHD (setting prvKey to undefined
|
|
||||||
* when an entry is a PHD avoids the skip on version for the
|
|
||||||
* next entry). In that case we expect the master version to
|
|
||||||
* follow. */
|
|
||||||
if (key === this.prvPHDKey) {
|
|
||||||
this.prvKey = undefined;
|
|
||||||
return FILTER_ACCEPT;
|
|
||||||
}
|
|
||||||
this.prvKey = key;
|
|
||||||
return FILTER_SKIP;
|
|
||||||
}
|
|
||||||
|
|
||||||
this.prvKey = key;
|
|
||||||
if (this.delimiter) {
|
|
||||||
// check if the key has the delimiter
|
|
||||||
const baseIndex = this.prefix ? this.prefix.length : 0;
|
|
||||||
const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
|
|
||||||
if (delimiterIndex >= 0) {
|
|
||||||
// try to add the prefix to the list
|
|
||||||
return this.addCommonPrefix(key, delimiterIndex);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return this.addContents(key, value);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Filter to apply on each iteration for buckets in v1 format,
|
|
||||||
* based on:
|
|
||||||
* - prefix
|
|
||||||
* - delimiter
|
|
||||||
* - maxKeys
|
|
||||||
* The marker is being handled directly by levelDB
|
|
||||||
* @param {Object} obj - The key and value of the element
|
|
||||||
* @param {String} obj.key - The key of the element
|
|
||||||
* @param {String} obj.value - The value of the element
|
|
||||||
* @return {number} - indicates if iteration should continue
|
|
||||||
*/
|
|
||||||
filterV1(obj) {
|
|
||||||
// Filtering master keys in v1 is simply listing the master
|
|
||||||
// keys, as the state of version keys do not change the
|
|
||||||
// result, so we can use Delimiter method directly.
|
|
||||||
return super.filter(obj);
|
|
||||||
}
|
|
||||||
|
|
||||||
skippingBase() {
|
|
||||||
if (this[this.nextContinueMarker]) {
|
|
||||||
// next marker or next continuation token:
|
|
||||||
// - foo/ : skipping foo/
|
|
||||||
// - foo : skipping foo.
|
|
||||||
const index = this[this.nextContinueMarker].
|
|
||||||
lastIndexOf(this.delimiter);
|
|
||||||
if (index === this[this.nextContinueMarker].length - 1) {
|
|
||||||
return this[this.nextContinueMarker];
|
|
||||||
}
|
|
||||||
return this[this.nextContinueMarker] + VID_SEP;
|
|
||||||
}
|
|
||||||
return SKIP_NONE;
|
|
||||||
}
|
|
||||||
|
|
||||||
skippingV0() {
|
|
||||||
if (this.inReplayPrefix) {
|
|
||||||
return DbPrefixes.Replay;
|
|
||||||
}
|
|
||||||
return this.skippingBase();
|
|
||||||
}
|
|
||||||
|
|
||||||
skippingV1() {
|
|
||||||
const skipTo = this.skippingBase();
|
|
||||||
if (skipTo === SKIP_NONE) {
|
|
||||||
return SKIP_NONE;
|
|
||||||
}
|
|
||||||
return DbPrefixes.Master + skipTo;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = { DelimiterMaster };
|
|
|
@@ -0,0 +1,620 @@
|
||||||
|
import {
|
||||||
|
Delimiter,
|
||||||
|
FilterState,
|
||||||
|
FilterReturnValue,
|
||||||
|
DelimiterFilterStateId,
|
||||||
|
DelimiterFilterState_NotSkipping,
|
||||||
|
DelimiterFilterState_SkippingPrefix,
|
||||||
|
ResultObject,
|
||||||
|
} from './delimiter';
|
||||||
|
const Version = require('../../versioning/Version').Version;
|
||||||
|
const VSConst = require('../../versioning/constants').VersioningConstants;
|
||||||
|
const { BucketVersioningKeyFormat } = VSConst;
|
||||||
|
const { FILTER_ACCEPT, FILTER_SKIP, FILTER_END, SKIP_NONE, inc } = require('./tools');
|
||||||
|
|
||||||
|
import { GapSetEntry } from '../cache/GapSet';
|
||||||
|
import { GapCacheInterface } from '../cache/GapCache';
|
||||||
|
|
||||||
|
const VID_SEP = VSConst.VersionId.Separator;
|
||||||
|
const { DbPrefixes } = VSConst;
|
||||||
|
|
||||||
|
export const enum DelimiterMasterFilterStateId {
|
||||||
|
SkippingVersionsV0 = 101,
|
||||||
|
WaitVersionAfterPHDV0 = 102,
|
||||||
|
SkippingGapV0 = 103,
|
||||||
|
};
|
||||||
|
|
||||||
|
interface DelimiterMasterFilterState_SkippingVersionsV0 extends FilterState {
|
||||||
|
id: DelimiterMasterFilterStateId.SkippingVersionsV0,
|
||||||
|
masterKey: string,
|
||||||
|
};
|
||||||
|
|
||||||
|
interface DelimiterMasterFilterState_WaitVersionAfterPHDV0 extends FilterState {
|
||||||
|
id: DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
|
||||||
|
masterKey: string,
|
||||||
|
};
|
||||||
|
|
||||||
|
interface DelimiterMasterFilterState_SkippingGapV0 extends FilterState {
|
||||||
|
id: DelimiterMasterFilterStateId.SkippingGapV0,
|
||||||
|
};
|
||||||
|
|
||||||
|
export const enum GapCachingState {
|
||||||
|
NoGapCache = 0, // there is no gap cache
|
||||||
|
UnknownGap = 1, // waiting for a cache lookup
|
||||||
|
GapLookupInProgress = 2, // asynchronous gap lookup in progress
|
||||||
|
GapCached = 3, // an upcoming or already skippable gap is cached
|
||||||
|
NoMoreGap = 4, // the cache doesn't have any more gaps inside the listed range
|
||||||
|
};
|
||||||
|
|
||||||
|
type GapCachingInfo_NoGapCache = {
|
||||||
|
state: GapCachingState.NoGapCache;
|
||||||
|
};
|
||||||
|
|
||||||
|
type GapCachingInfo_NoCachedGap = {
|
||||||
|
state: GapCachingState.UnknownGap
|
||||||
|
| GapCachingState.GapLookupInProgress
|
||||||
|
gapCache: GapCacheInterface;
|
||||||
|
};
|
||||||
|
|
||||||
|
type GapCachingInfo_GapCached = {
|
||||||
|
state: GapCachingState.GapCached;
|
||||||
|
gapCache: GapCacheInterface;
|
||||||
|
gapCached: GapSetEntry;
|
||||||
|
};
|
||||||
|
|
||||||
|
type GapCachingInfo_NoMoreGap = {
|
||||||
|
state: GapCachingState.NoMoreGap;
|
||||||
|
};
|
||||||
|
|
||||||
|
type GapCachingInfo = GapCachingInfo_NoGapCache
|
||||||
|
| GapCachingInfo_NoCachedGap
|
||||||
|
| GapCachingInfo_GapCached
|
||||||
|
| GapCachingInfo_NoMoreGap;
|
||||||
|
|
||||||
|
|
||||||
|
export const enum GapBuildingState {
|
||||||
|
Disabled = 0, // no gap cache or no gap building needed (e.g. in V1 versioning format)
|
||||||
|
NotBuilding = 1, // not currently building a gap (i.e. not listing within a gap)
|
||||||
|
Building = 2, // currently building a gap (i.e. listing within a gap)
|
||||||
|
Expired = 3, // not allowed to build due to exposure delay timeout
|
||||||
|
};
|
||||||
|
|
||||||
|
type GapBuildingInfo_NothingToBuild = {
|
||||||
|
state: GapBuildingState.Disabled | GapBuildingState.Expired;
|
||||||
|
};
|
||||||
|
|
||||||
|
type GapBuildingParams = {
|
||||||
|
/**
|
||||||
|
* minimum weight for a gap to be created in the cache
|
||||||
|
*/
|
||||||
|
minGapWeight: number;
|
||||||
|
/**
|
||||||
|
* trigger a cache setGap() call every N skippable keys
|
||||||
|
*/
|
||||||
|
triggerSaveGapWeight: number;
|
||||||
|
/**
|
||||||
|
* timestamp to assess whether we're still inside the validity period to
|
||||||
|
* be allowed to build gaps
|
||||||
|
*/
|
||||||
|
initTimestamp: number;
|
||||||
|
};
|
||||||
|
|
||||||
|
type GapBuildingInfo_NotBuilding = {
|
||||||
|
state: GapBuildingState.NotBuilding;
|
||||||
|
gapCache: GapCacheInterface;
|
||||||
|
params: GapBuildingParams;
|
||||||
|
};
|
||||||
|
|
||||||
|
type GapBuildingInfo_Building = {
|
||||||
|
state: GapBuildingState.Building;
|
||||||
|
gapCache: GapCacheInterface;
|
||||||
|
params: GapBuildingParams;
|
||||||
|
/**
|
||||||
|
* Gap currently being created
|
||||||
|
*/
|
||||||
|
gap: GapSetEntry;
|
||||||
|
/**
|
||||||
|
* total current weight of the gap being created
|
||||||
|
*/
|
||||||
|
gapWeight: number;
|
||||||
|
};
|
||||||
|
|
||||||
|
type GapBuildingInfo = GapBuildingInfo_NothingToBuild
|
||||||
|
| GapBuildingInfo_NotBuilding
|
||||||
|
| GapBuildingInfo_Building;
|
||||||
|
|
||||||
|
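// Illustrative sketch (not part of the original module): a minimal in-memory
// object satisfying the GapCacheInterface shape this file relies on
// (exposureDelayMs, maxGapWeight, lookupGap(), setGap()), e.g. for unit
// tests. Storing a single gap is a simplification of the real GapCache/GapSet
// implementation, which merges and chains gaps.
//
//     const mockGapCache = {
//         exposureDelayMs: 30000,
//         maxGapWeight: 500,
//         _gap: null,
//         setGap(firstKey, lastKey, weight) {
//             // the real cache merges/extends gaps; keep only the last one here
//             this._gap = { firstKey, lastKey, weight };
//         },
//         lookupGap(minKey, maxKey) {
//             const gap = this._gap;
//             const match = gap && gap.lastKey >= minKey
//                 && (maxKey === undefined || gap.firstKey < maxKey);
//             return Promise.resolve(match ? gap : null);
//         },
//     };
//
//     // later: listing.refreshGapCache(mockGapCache);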
/**
 * Handle object listing with parameters. This extends the base class Delimiter
 * to return the raw master versions of existing objects.
 */
export class DelimiterMaster extends Delimiter {

    _gapCaching: GapCachingInfo;
    _gapBuilding: GapBuildingInfo;
    _refreshedBuildingParams: GapBuildingParams | null;

    /**
     * Delimiter listing of master versions.
     * @param {Object} parameters - listing parameters
     * @param {String} [parameters.delimiter] - delimiter per amazon format
     * @param {String} [parameters.prefix] - prefix per amazon format
     * @param {String} [parameters.marker] - marker per amazon format
     * @param {Number} [parameters.maxKeys] - number of keys to list
     * @param {Boolean} [parameters.v2] - indicates whether v2 format
     * @param {String} [parameters.startAfter] - marker per amazon v2 format
     * @param {String} [parameters.continuationToken] - obfuscated amazon token
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat="v0"] - versioning key format
     */
    constructor(parameters, logger, vFormat?: string) {
        super(parameters, logger, vFormat);

        if (this.vFormat === BucketVersioningKeyFormat.v0) {
            // override Delimiter's implementation of NotSkipping for
            // DelimiterMaster logic (skipping versions and special
            // handling of delete markers and PHDs)
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingPrefixNorVersionsV0.bind(this));

            // add extra state handlers specific to DelimiterMaster with v0 format
            this.setKeyHandler(
                DelimiterMasterFilterStateId.SkippingVersionsV0,
                this.keyHandler_SkippingVersionsV0.bind(this));

            this.setKeyHandler(
                DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
                this.keyHandler_WaitVersionAfterPHDV0.bind(this));

            this.setKeyHandler(
                DelimiterMasterFilterStateId.SkippingGapV0,
                this.keyHandler_SkippingGapV0.bind(this));

            if (this.marker) {
                // distinct initial state to include some special logic
                // before the first master key is found that does not have
                // to be checked afterwards
                this.state = <DelimiterMasterFilterState_SkippingVersionsV0> {
                    id: DelimiterMasterFilterStateId.SkippingVersionsV0,
                    masterKey: this.marker,
                };
            } else {
                this.state = <DelimiterFilterState_NotSkipping> {
                    id: DelimiterFilterStateId.NotSkipping,
                };
            }
        } else {
            // save the base implementation of the `NotSkipping` state in
            // Delimiter before overriding it with ours, to be able to call it from there
            this.keyHandler_NotSkipping_Delimiter = this.keyHandlers[DelimiterFilterStateId.NotSkipping];
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingPrefixNorVersionsV1.bind(this));
        }
        // in v1, we can directly use Delimiter's implementation,
        // which is already set to the proper state

        // default initialization of the gap cache and building states, can be
        // set by refreshGapCache()
        this._gapCaching = {
            state: GapCachingState.NoGapCache,
        };
        this._gapBuilding = {
            state: GapBuildingState.Disabled,
        };
        this._refreshedBuildingParams = null;
    }

    /**
     * Get the validity period left before a refresh of the gap cache is needed
     * to continue building new gaps.
     *
     * @return {number|null} one of:
     * - the remaining time in milliseconds in which gaps can be added to the
     *   cache before a call to refreshGapCache() is required
     * - or 0 if there is no time left and a call to refreshGapCache() is required
     *   to resume caching gaps
     * - or null if refreshing the cache is never needed (because the gap cache
     *   is either not available or not used)
     */
    getGapBuildingValidityPeriodMs(): number | null {
        let gapBuilding;
        switch (this._gapBuilding.state) {
        case GapBuildingState.Disabled:
            return null;
        case GapBuildingState.Expired:
            return 0;
        case GapBuildingState.NotBuilding:
            gapBuilding = <GapBuildingInfo_NotBuilding> this._gapBuilding;
            break;
        case GapBuildingState.Building:
            gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
            break;
        }
        const { gapCache, params } = gapBuilding;
        const elapsedTime = Date.now() - params.initTimestamp;
        return Math.max(gapCache.exposureDelayMs - elapsedTime, 0);
    }

    /**
     * Refresh the gaps caching logic (gaps are series of current delete markers
     * in V0 bucket metadata format). It has two effects:
     *
     * - starts exposing existing and future gaps from the cache to efficiently
     *   skip over series of current delete markers that have been seen and cached
     *   earlier
     *
     * - enables building and caching new gaps (or extending existing ones), for a
     *   limited time period defined by the `gapCacheProxy.exposureDelayMs` value
     *   in milliseconds. To refresh the validity period and resume building and
     *   caching new gaps, one must restart a new listing from the database (starting
     *   at the current listing key, included), then call refreshGapCache() again.
     *
     * @param {GapCacheInterface} gapCacheProxy - API proxy to the gaps cache
     * (the proxy should handle prefixing object keys with the bucket name)
     * @param {number} [minGapWeight=100] - minimum weight of a gap for it to be
     * added to the cache
     * @param {number} [triggerSaveGapWeight] - cumulative weight to wait for
     * before saving the current building gap. Cannot be greater than
     * `gapCacheProxy.maxGapWeight` (the value is thresholded to `maxGapWeight`
     * otherwise). Defaults to `gapCacheProxy.maxGapWeight / 2`.
     * @return {undefined}
     */
    refreshGapCache(
        gapCacheProxy: GapCacheInterface,
        minGapWeight?: number,
        triggerSaveGapWeight?: number
    ): void {
        if (this.vFormat !== BucketVersioningKeyFormat.v0) {
            return;
        }
        if (this._gapCaching.state === GapCachingState.NoGapCache) {
            this._gapCaching = {
                state: GapCachingState.UnknownGap,
                gapCache: gapCacheProxy,
            };
        }
        const refreshedBuildingParams: GapBuildingParams = {
            minGapWeight: minGapWeight || 100,
            triggerSaveGapWeight: triggerSaveGapWeight
                || Math.trunc(gapCacheProxy.maxGapWeight / 2),
            initTimestamp: Date.now(),
        };
        if (this._gapBuilding.state === GapBuildingState.Building) {
            // refreshed params will be applied as soon as the current building gap is saved
            this._refreshedBuildingParams = refreshedBuildingParams;
        } else {
            this._gapBuilding = {
                state: GapBuildingState.NotBuilding,
                gapCache: gapCacheProxy,
                params: refreshedBuildingParams,
            };
        }
    }
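    // Illustrative sketch (not part of the original class): how a long-running
    // caller, e.g. a lifecycle-style listing driven in several passes, might use
    // refreshGapCache() together with getGapBuildingValidityPeriodMs(). The
    // `gapCacheProxy` and `runOneListingPass` names are hypothetical stand-ins.
    //
    //     const listing = new DelimiterMaster(params, logger, 'v0');
    //     listing.refreshGapCache(gapCacheProxy);    // enable gap skipping and building
    //
    //     await runOneListingPass(listing);
    //     if (listing.getGapBuildingValidityPeriodMs() === 0) {
    //         // exposure delay elapsed: restart a listing from the current key,
    //         // then call refreshGapCache() again to resume building gaps
    //         listing.refreshGapCache(gapCacheProxy);
    //     }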
    /**
     * Trigger a lookup of the closest upcoming or already skippable gap.
     *
     * @param {string} fromKey - lookup a gap not before 'fromKey'
     * @return {undefined} - the lookup is asynchronous and its
     * response is handled inside this function
     */
    _triggerGapLookup(gapCaching: GapCachingInfo_NoCachedGap, fromKey: string): void {
        this._gapCaching = {
            state: GapCachingState.GapLookupInProgress,
            gapCache: gapCaching.gapCache,
        };
        const maxKey = this.prefix ? inc(this.prefix) : undefined;
        gapCaching.gapCache.lookupGap(fromKey, maxKey).then(_gap => {
            const gap = <GapSetEntry | null> _gap;
            if (gap) {
                this._gapCaching = {
                    state: GapCachingState.GapCached,
                    gapCache: gapCaching.gapCache,
                    gapCached: gap,
                };
            } else {
                this._gapCaching = {
                    state: GapCachingState.NoMoreGap,
                };
            }
        });
    }

    _checkGapOnMasterDeleteMarker(key: string): FilterReturnValue {
        switch (this._gapBuilding.state) {
        case GapBuildingState.Disabled:
        case GapBuildingState.Expired:
            break;
        case GapBuildingState.NotBuilding:
            this._createBuildingGap(key, 1);
            break;
        case GapBuildingState.Building:
            this._updateBuildingGap(key);
            break;
        }
        if (this._gapCaching.state === GapCachingState.GapCached) {
            const { gapCached } = this._gapCaching;
            if (key >= gapCached.firstKey) {
                if (key <= gapCached.lastKey) {
                    // we are inside the last looked up cached gap: transition to
                    // 'SkippingGapV0' state
                    this.setState(<DelimiterMasterFilterState_SkippingGapV0> {
                        id: DelimiterMasterFilterStateId.SkippingGapV0,
                    });
                    // cut the current gap before skipping, it will be merged or
                    // chained with the existing one (depending on its weight)
                    if (this._gapBuilding.state === GapBuildingState.Building) {
                        // subtract 1 from the weight because we are going to chain this gap,
                        // which has an overlap of one key.
                        this._gapBuilding.gap.weight -= 1;
                        this._cutBuildingGap();
                    }
                    return FILTER_SKIP;
                }
                // as we are past the cached gap, we will need another lookup
                this._gapCaching = {
                    state: GapCachingState.UnknownGap,
                    gapCache: this._gapCaching.gapCache,
                };
            }
        }
        if (this._gapCaching.state === GapCachingState.UnknownGap) {
            this._triggerGapLookup(this._gapCaching, key);
        }
        return FILTER_ACCEPT;
    }

    filter_onNewMasterKeyV0(key: string, value: string): FilterReturnValue {
        // if this master key is a delete marker, accept it without
        // adding the version to the contents
        if (Version.isDeleteMarker(value)) {
            // update the state to start skipping versions of the new master key
            this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
                id: DelimiterMasterFilterStateId.SkippingVersionsV0,
                masterKey: key,
            });
            return this._checkGapOnMasterDeleteMarker(key);
        }
        if (Version.isPHD(value)) {
            // master version is a PHD version: wait for the first
            // following version that will be considered as the actual
            // master key
            this.setState(<DelimiterMasterFilterState_WaitVersionAfterPHDV0> {
                id: DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
                masterKey: key,
            });
            return FILTER_ACCEPT;
        }
        // cut the current gap as soon as a non-deleted entry is seen
        this._cutBuildingGap();

        if (key.startsWith(DbPrefixes.Replay)) {
            // skip internal replay prefix entirely
            this.setState(<DelimiterFilterState_SkippingPrefix> {
                id: DelimiterFilterStateId.SkippingPrefix,
                prefix: DbPrefixes.Replay,
            });
            return FILTER_SKIP;
        }
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }

        const commonPrefix = this.addCommonPrefixOrContents(key, value);
        if (commonPrefix) {
            // transition into SkippingPrefix state to skip all following keys
            // while they start with the same prefix
            this.setState(<DelimiterFilterState_SkippingPrefix> {
                id: DelimiterFilterStateId.SkippingPrefix,
                prefix: commonPrefix,
            });
            return FILTER_ACCEPT;
        }
        // update the state to start skipping versions of the new master key
        this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
            id: DelimiterMasterFilterStateId.SkippingVersionsV0,
            masterKey: key,
        });
        return FILTER_ACCEPT;
    }

    keyHandler_NotSkippingPrefixNorVersionsV0(key: string, value: string): FilterReturnValue {
        return this.filter_onNewMasterKeyV0(key, value);
    }

    filter_onNewMasterKeyV1(key: string, value: string): FilterReturnValue {
        // if this master key is a delete marker, accept it without
        // adding the version to the contents
        if (Version.isDeleteMarker(value)) {
            return FILTER_ACCEPT;
        }
        // use base Delimiter's implementation
        return this.keyHandler_NotSkipping_Delimiter(key, value);
    }

    keyHandler_NotSkippingPrefixNorVersionsV1(key: string, value: string): FilterReturnValue {
        return this.filter_onNewMasterKeyV1(key, value);
    }

    keyHandler_SkippingVersionsV0(key: string, value: string): FilterReturnValue {
        /* In the SkippingVersionsV0 state, skip all version keys
         * (<key><versionIdSeparator><version>) */
        const versionIdIndex = key.indexOf(VID_SEP);
        if (versionIdIndex !== -1) {
            // version keys count in the building gap weight because they must
            // also be listed until skipped
            if (this._gapBuilding.state === GapBuildingState.Building) {
                this._updateBuildingGap(key);
            }
            return FILTER_SKIP;
        }
        return this.filter_onNewMasterKeyV0(key, value);
    }

    keyHandler_WaitVersionAfterPHDV0(key: string, value: string): FilterReturnValue {
        // After a PHD key is encountered, the next version key of the
        // same object, if it exists, is the new master key, hence
        // consider it as such and call 'onNewMasterKeyV0' (the test
        // 'masterKey == phdKey' is probably redundant when we already
        // know we have a versioned key, since all objects in v0 have
        // a master key, but keep it in case of doubt)
        const { masterKey: phdKey } = <DelimiterMasterFilterState_WaitVersionAfterPHDV0> this.state;
        const versionIdIndex = key.indexOf(VID_SEP);
        if (versionIdIndex !== -1) {
            const masterKey = key.slice(0, versionIdIndex);
            if (masterKey === phdKey) {
                return this.filter_onNewMasterKeyV0(masterKey, value);
            }
        }
        return this.filter_onNewMasterKeyV0(key, value);
    }

    keyHandler_SkippingGapV0(key: string, value: string): FilterReturnValue {
        const { gapCache, gapCached } = <GapCachingInfo_GapCached> this._gapCaching;
        if (key <= gapCached.lastKey) {
            return FILTER_SKIP;
        }
        this._gapCaching = {
            state: GapCachingState.UnknownGap,
            gapCache,
        };
        this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
            id: DelimiterMasterFilterStateId.SkippingVersionsV0,
        });
        // Start a gap with weight=0 from the latest skippable key. This allows
        // the gap just skipped to be extended with a chained gap in case
        // other delete markers are seen after the existing gap is skipped.
        this._createBuildingGap(gapCached.lastKey, 0, gapCached.weight);

        return this.handleKey(key, value);
    }

    skippingBase(): string | undefined {
        switch (this.state.id) {
        case DelimiterMasterFilterStateId.SkippingVersionsV0:
            const { masterKey } = <DelimiterMasterFilterState_SkippingVersionsV0> this.state;
            return masterKey + inc(VID_SEP);

        case DelimiterMasterFilterStateId.SkippingGapV0:
            const { gapCached } = <GapCachingInfo_GapCached> this._gapCaching;
            return gapCached.lastKey;

        default:
            return super.skippingBase();
        }
    }
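    // For example, in the SkippingVersionsV0 state with masterKey 'foo',
    // skippingBase() returns 'foo' + inc(VID_SEP): the smallest key strictly
    // greater than every version key 'foo<VID_SEP>...', letting the caller
    // seek the database cursor past all versions of 'foo' in one jump; in the
    // SkippingGapV0 state it returns the cached gap's last key.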
    result(): ResultObject {
        this._cutBuildingGap();
        return super.result();
    }

    _checkRefreshedBuildingParams(params: GapBuildingParams): GapBuildingParams {
        if (this._refreshedBuildingParams) {
            const newParams = this._refreshedBuildingParams;
            this._refreshedBuildingParams = null;
            return newParams;
        }
        return params;
    }

    /**
     * Save the gap being built if allowed (i.e. still within the
     * allocated exposure time window).
     *
     * @return {boolean} - true if the gap was saved, false if we are
     * outside the allocated exposure time window.
     */
    _saveBuildingGap(): boolean {
        const { gapCache, params, gap, gapWeight } =
            <GapBuildingInfo_Building> this._gapBuilding;
        const totalElapsed = Date.now() - params.initTimestamp;
        if (totalElapsed >= gapCache.exposureDelayMs) {
            this._gapBuilding = {
                state: GapBuildingState.Expired,
            };
            this._refreshedBuildingParams = null;
            return false;
        }
        const { firstKey, lastKey, weight } = gap;
        gapCache.setGap(firstKey, lastKey, weight);
        this._gapBuilding = {
            state: GapBuildingState.Building,
            gapCache,
            params: this._checkRefreshedBuildingParams(params),
            gap: {
                firstKey: gap.lastKey,
                lastKey: gap.lastKey,
                weight: 0,
            },
            gapWeight,
        };
        return true;
    }

    /**
     * Create a new gap to be extended afterwards
     *
     * @param {string} newKey - gap's first key
     * @param {number} startWeight - initial weight of the building gap (usually 0 or 1)
     * @param {number} [cachedWeight] - if continuing a cached gap, weight of the existing
     * cached portion
     * @return {undefined}
     */
    _createBuildingGap(newKey: string, startWeight: number, cachedWeight?: number): void {
        if (this._gapBuilding.state === GapBuildingState.NotBuilding) {
            const { gapCache, params } = <GapBuildingInfo_NotBuilding> this._gapBuilding;
            this._gapBuilding = {
                state: GapBuildingState.Building,
                gapCache,
                params: this._checkRefreshedBuildingParams(params),
                gap: {
                    firstKey: newKey,
                    lastKey: newKey,
                    weight: startWeight,
                },
                gapWeight: (cachedWeight || 0) + startWeight,
            };
        }
    }

    _updateBuildingGap(newKey: string): void {
        const gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
        const { params, gap } = gapBuilding;
        gap.lastKey = newKey;
        gap.weight += 1;
        gapBuilding.gapWeight += 1;
        // the GapCache API requires updating a gap regularly because it can only split
        // it once per update, by the known last key. In practice the default behavior
        // is to trigger an update after a number of keys that is half the maximum weight.
        // It is also useful for other listings to benefit from the cache sooner.
        if (gapBuilding.gapWeight >= params.minGapWeight &&
            gap.weight >= params.triggerSaveGapWeight) {
            this._saveBuildingGap();
        }
    }
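    // Worked example of the weight arithmetic above: with minGapWeight=100 and
    // triggerSaveGapWeight=250 (the default for a gapCacheProxy.maxGapWeight of
    // 500), a run of current delete markers and their version keys starts a
    // building gap on the first delete marker, then _updateBuildingGap() grows
    // it by one per listed key; once the cumulative gapWeight reaches 100 and
    // the current chunk's weight reaches 250, _saveBuildingGap() calls setGap()
    // and starts a new zero-weight chunk chained at the saved gap's last key.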
    _cutBuildingGap(): void {
        if (this._gapBuilding.state === GapBuildingState.Building) {
            let gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
            let { gapCache, params, gap, gapWeight } = gapBuilding;
            // only set gaps that are significant enough in weight and
            // with a non-empty extension
            if (gapWeight >= params.minGapWeight && gap.weight > 0) {
                // we're done if we were not allowed to save the gap
                if (!this._saveBuildingGap()) {
                    return;
                }
                // params may have been refreshed, reload them
                gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
                params = gapBuilding.params;
            }
            this._gapBuilding = {
                state: GapBuildingState.NotBuilding,
                gapCache,
                params,
            };
        }
    }
}
@@ -0,0 +1,202 @@
const { DelimiterVersions } = require('./delimiterVersions');
const { FILTER_END, FILTER_SKIP } = require('./tools');

const TRIM_METADATA_MIN_BLOB_SIZE = 10000;

/**
 * Handle object listing with parameters. This extends the base class DelimiterVersions
 * to return the raw non-current versions of objects.
 */
class DelimiterNonCurrent extends DelimiterVersions {
    /**
     * Delimiter listing of non-current versions.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.keyMarker - key marker
     * @param {String} parameters.versionIdMarker - version id marker
     * @param {String} parameters.beforeDate - limit the response to keys with stale date older than beforeDate.
     * The "stale date" is the date when a version becomes non-current.
     * @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
     * @param {String} parameters.excludedDataStoreName - exclude dataStoreName matches from the versions
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger, vFormat);

        this.beforeDate = parameters.beforeDate;
        this.excludedDataStoreName = parameters.excludedDataStoreName;
        this.maxScannedLifecycleListingEntries = parameters.maxScannedLifecycleListingEntries;

        // internal state
        this.prevKey = null;
        this.staleDate = null;

        this.scannedKeys = 0;
    }

    getLastModified(value) {
        let lastModified;
        try {
            const v = JSON.parse(value);
            lastModified = v['last-modified'];
        } catch (e) {
            this.logger.warn('could not parse Object Metadata while listing',
                {
                    method: 'getLastModified',
                    err: e.toString(),
                });
        }
        return lastModified;
    }

    // Overwrite keyHandler_SkippingVersions to include the last version from the previous listing.
    // The creation (last-modified) date of this version will be the stale date for the following version.
    // eslint-disable-next-line camelcase
    keyHandler_SkippingVersions(key, versionId, value) {
        if (key === this.keyMarker) {
            // since the nonversioned key equals the marker, there is
            // necessarily a versionId in this key
            const _versionId = versionId;
            if (_versionId < this.versionIdMarker) {
                // skip all versions until marker
                return FILTER_SKIP;
            }
        }
        this.setState({
            id: 1 /* NotSkipping */,
        });
        return this.handleKey(key, versionId, value);
    }

    filter(obj) {
        if (this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries) {
            this.IsTruncated = true;
            this.logger.info('listing stopped due to reaching the maximum scanned entries limit',
                {
                    maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
                    scannedKeys: this.scannedKeys,
                });
            return FILTER_END;
        }
        ++this.scannedKeys;
        return super.filter(obj);
    }

    /**
     * NOTE: Each version of a specific key is sorted from the latest to the oldest
     * thanks to the way version ids are generated.
     * DESCRIPTION: Skip the version if it represents the master key, but keep its last-modified date in memory,
     * which will be the stale date of the following version.
     * The following version is pushed only:
     * - if the "stale date" (picked up from the previous version) is available (JSON.parse has not failed),
     * - if "beforeDate" is not specified or if specified and the "stale date" is older,
     * - if "excludedDataStoreName" is not specified or if specified and the data store name is different.
     * The in-memory "stale date" is then updated with the version's last-modified date to be used for
     * the following version.
     * The process stops and returns the available results if either:
     * - no more metadata key is left to be processed
     * - the listing reaches the maximum number of keys to be returned
     * - the internal timeout is reached
     * @param {String} key - The key to add
     * @param {String} versionId - The version id
     * @param {String} value - The value of the key
     * @return {undefined}
     */
    addVersion(key, versionId, value) {
        this.nextKeyMarker = key;
        this.nextVersionIdMarker = versionId;

        // Skip the version if it represents the current version, but keep its last-modified date,
        // which will be the stale date of the following version.
        const isCurrentVersion = key !== this.prevKey;
        if (isCurrentVersion) {
            this.staleDate = this.getLastModified(value);
            this.prevKey = key;
            return;
        }

        // The following version is pushed only:
        // - if the "stale date" (picked up from the previous version) is available (JSON.parse has not failed),
        // - if "beforeDate" is not specified or if specified and the "stale date" is older,
        // - if "excludedDataStoreName" is not specified or if specified and the data store name is different.
        let lastModified;
        if (this.staleDate && (!this.beforeDate || this.staleDate < this.beforeDate)) {
            const parsedValue = this._parse(value);
            // if parsing fails, skip the key.
            if (parsedValue) {
                const dataStoreName = parsedValue.dataStoreName;
                lastModified = parsedValue['last-modified'];
                if (!this.excludedDataStoreName || dataStoreName !== this.excludedDataStoreName) {
                    const s = this._stringify(parsedValue, this.staleDate);
                    // check that _stringify succeeds to only push objects with a defined staleDate.
                    if (s) {
                        this.Versions.push({ key, value: s });
                        ++this.keys;
                    }
                }
            }
        }

        // The in-memory "stale date" is then updated with the version's last-modified date to be used for
        // the following version.
        this.staleDate = lastModified || this.getLastModified(value);

        return;
    }

    /**
     * Parses the stringified entry's value and removes the location property if too large.
     * @param {string} s - stringified value
     * @return {object} p - undefined if parsing fails, otherwise it contains the parsed value.
     */
    _parse(s) {
        let p;
        try {
            p = JSON.parse(s);
            if (s.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
                delete p.location;
            }
        } catch (e) {
            this.logger.warn('Could not parse Object Metadata while listing', {
                method: 'DelimiterNonCurrent._parse',
                err: e.toString(),
            });
        }
        return p;
    }

    _stringify(parsedMD, staleDate) {
        const p = parsedMD;
        let s = undefined;
        p.staleDate = staleDate;
        try {
            s = JSON.stringify(p);
        } catch (e) {
            this.logger.warn('could not stringify Object Metadata while listing', {
                method: 'DelimiterNonCurrent._stringify',
                err: e.toString(),
            });
        }
        return s;
    }

    result() {
        const { Versions, IsTruncated, NextKeyMarker, NextVersionIdMarker } = super.result();

        const result = {
            Contents: Versions,
            IsTruncated,
        };

        if (NextKeyMarker) {
            result.NextKeyMarker = NextKeyMarker;
        }

        if (NextVersionIdMarker) {
            result.NextVersionIdMarker = NextVersionIdMarker;
        }

        return result;
    }
}
module.exports = { DelimiterNonCurrent };
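// Illustrative sketch (not part of the original module): constructing a
// DelimiterNonCurrent listing for a lifecycle-style scan. The parameter
// values and the `logger` object are hypothetical.
//
//     const listing = new DelimiterNonCurrent({
//         keyMarker: 'photos/',                  // resume point from a previous pass
//         versionIdMarker: undefined,
//         beforeDate: '2024-01-01T00:00:00.000Z',
//         excludedDataStoreName: 'cold-location',
//         maxScannedLifecycleListingEntries: 10000,
//     }, logger, 'v1');
//     // feed { key, value } entries to listing.filter(entry), then:
//     // const { Contents, IsTruncated, NextKeyMarker } = listing.result();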
@@ -0,0 +1,204 @@
const DelimiterVersions = require('./delimiterVersions').DelimiterVersions;
const { FILTER_END } = require('./tools');
const TRIM_METADATA_MIN_BLOB_SIZE = 10000;
/**
 * Handle object listing with parameters. This extends the base class DelimiterVersions
 * to return the orphan delete markers. Orphan delete markers are also
 * referred to as expired object delete markers.
 * They are delete markers with zero noncurrent versions.
 */
class DelimiterOrphanDeleteMarker extends DelimiterVersions {
    /**
     * Delimiter listing of orphan delete markers.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.beforeDate - limit the response to keys older than beforeDate
     * @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        const {
            marker,
            maxKeys,
            prefix,
            beforeDate,
            maxScannedLifecycleListingEntries,
        } = parameters;

        const versionParams = {
            // The orphan delete marker logic uses the term 'marker' instead of 'keyMarker',
            // as the latter could suggest the presence of a 'versionIdMarker'.
            keyMarker: marker,
            maxKeys,
            prefix,
        };
        super(versionParams, logger, vFormat);

        this.maxScannedLifecycleListingEntries = maxScannedLifecycleListingEntries;
        this.beforeDate = beforeDate;
        // this.prevKeyName is used as a marker for the next listing when the current one reaches its entry limit.
        // We cannot rely on this.keyName, as it contains the name of the current key.
        // In the event of a listing interruption due to reaching the maximum scanned entries,
        // relying on this.keyName would cause the next listing to skip the current key because S3 starts
        // listing after the marker.
        this.prevKeyName = null;
        this.keyName = null;
        this.value = null;
        this.scannedKeys = 0;
    }

    _reachedMaxKeys() {
        if (this.keys >= this.maxKeys) {
            return true;
        }
        return false;
    }

    _addOrphan() {
        const parsedValue = this._parse(this.value);
        // if parsing fails, skip the key.
        if (parsedValue) {
            const lastModified = parsedValue['last-modified'];
            const isDeleteMarker = parsedValue.isDeleteMarker;
            // We then check if the orphan version is a delete marker and if it is older than the "beforeDate"
            if ((!this.beforeDate || (lastModified && lastModified < this.beforeDate)) && isDeleteMarker) {
                // Prefer returning untrimmed data rather than stopping the service in case of parsing failure.
                const s = this._stringify(parsedValue) || this.value;
                this.Versions.push({ key: this.keyName, value: s });
                this.nextKeyMarker = this.keyName;
                ++this.keys;
            }
        }
    }

    /**
     * Parses the stringified entry's value and removes the location property if too large.
     * @param {string} s - stringified value
     * @return {object} p - undefined if parsing fails, otherwise it contains the parsed value.
     */
    _parse(s) {
        let p;
        try {
            p = JSON.parse(s);
            if (s.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
                delete p.location;
            }
        } catch (e) {
            this.logger.warn('Could not parse Object Metadata while listing', {
                method: 'DelimiterOrphanDeleteMarker._parse',
                err: e.toString(),
            });
        }
        return p;
    }

    _stringify(value) {
        const p = value;
        let s = undefined;
        try {
            s = JSON.stringify(p);
        } catch (e) {
            this.logger.warn('could not stringify Object Metadata while listing',
                {
                    method: 'DelimiterOrphanDeleteMarker._stringify',
                    err: e.toString(),
                });
        }
        return s;
    }
    /**
     * The purpose of _isMaxScannedEntriesReached is to restrict the number of scanned entries,
     * thus controlling resource overhead (CPU...).
     * @return {boolean} isMaxScannedEntriesReached - true if the maximum limit on the number
     * of entries scanned has been reached, false otherwise.
     */
    _isMaxScannedEntriesReached() {
        return this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries;
    }

    filter(obj) {
        if (this._isMaxScannedEntriesReached()) {
            this.nextKeyMarker = this.prevKeyName;
            this.IsTruncated = true;
            this.logger.info('listing stopped due to reaching the maximum scanned entries limit',
                {
                    maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
                    scannedKeys: this.scannedKeys,
                });
            return FILTER_END;
        }
        ++this.scannedKeys;
        return super.filter(obj);
    }

    /**
     * NOTE: Each version of a specific key is sorted from the latest to the oldest
     * thanks to the way version ids are generated.
     * DESCRIPTION: For a given key, the latest version is kept in memory since it is the current version.
     * If the following version references a new key, it means that the previous one was an orphan version.
     * We then check if the orphan version is a delete marker and if it is older than the "beforeDate".
     * The process stops and returns the available results if either:
     * - no more metadata key is left to be processed
     * - the listing reaches the maximum number of keys to be returned
     * - the internal timeout is reached
     * NOTE: we cannot leverage MongoDB to list keys older than "beforeDate"
     * because then we will not be able to assess whether they are orphans.
     * @param {String} key - The object key.
     * @param {String} versionId - The object version id.
     * @param {String} value - The value of the key
     * @return {undefined}
     */
    addVersion(key, versionId, value) {
        // For a given key, the youngest version is kept in memory since it represents the current version.
        if (key !== this.keyName) {
            // If this.value is defined, it means that the <this.keyName, this.value> pair is "allowed" to be an orphan.
            if (this.value) {
                this._addOrphan();
            }
            this.prevKeyName = this.keyName;
            this.keyName = key;
            this.value = value;

            return;
        }

        // If the key is not the current version, we can skip it in the next listing
        // in the case where the current listing is interrupted due to reaching the maximum scanned entries.
        this.prevKeyName = key;
        this.keyName = key;
        this.value = null;

        return;
    }

    result() {
        // Only check for a remaining last orphan delete marker if the listing is not interrupted.
        // This will help avoid false positives.
        if (!this._isMaxScannedEntriesReached()) {
            // The following check makes sure the last orphan delete marker is not forgotten.
            if (this.keys < this.maxKeys) {
                if (this.value) {
                    this._addOrphan();
                }
            // The following makes sure that if maxKeys is reached, IsTruncated is set to true.
            // We moved the "isTruncated" logic from _reachedMaxKeys to make sure we take into account the last entity
            // if the listing is truncated right before the last entity and the last entity is an orphan delete marker.
            } else {
                this.IsTruncated = this.maxKeys > 0;
            }
        }

        const result = {
            Contents: this.Versions,
            IsTruncated: this.IsTruncated,
        };

        if (this.IsTruncated) {
            result.NextMarker = this.nextKeyMarker;
        }

        return result;
    }
}

module.exports = { DelimiterOrphanDeleteMarker };
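// Illustrative sketch (not part of the original module): constructing a
// DelimiterOrphanDeleteMarker listing. Parameter values and `logger` are
// hypothetical; note that it takes 'marker' rather than 'keyMarker'.
//
//     const listing = new DelimiterOrphanDeleteMarker({
//         marker: resumeMarker,                  // NextMarker from a previous pass
//         prefix: 'logs/',
//         maxKeys: 1000,
//         beforeDate: '2024-01-01T00:00:00.000Z',
//         maxScannedLifecycleListingEntries: 10000,
//     }, logger, 'v1');
//     // feed { key, value } entries to listing.filter(entry); listing.result()
//     // returns { Contents, IsTruncated } plus NextMarker when truncated.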
@@ -1,283 +0,0 @@
'use strict'; // eslint-disable-line strict

const Delimiter = require('./delimiter').Delimiter;
const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } =
    require('./tools');

const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

/**
 * Handle object listing with parameters
 *
 * @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
 * @prop {String[]} Contents - 'files' to list
 * @prop {Boolean} IsTruncated - truncated listing flag
 * @prop {String|undefined} NextMarker - marker per amazon format
 * @prop {Number} keys - count of listed keys
 * @prop {String|undefined} delimiter - separator per amazon format
 * @prop {String|undefined} prefix - prefix per amazon format
 * @prop {Number} maxKeys - number of keys to list
 */
class DelimiterVersions extends Delimiter {
    constructor(parameters, logger, vFormat) {
        super(parameters, logger, vFormat);
        // specific to version listing
        this.keyMarker = parameters.keyMarker;
        this.versionIdMarker = parameters.versionIdMarker;
        // internal state
        this.masterKey = undefined;
        this.masterVersionId = undefined;
        // listing results
        this.NextMarker = parameters.keyMarker;
        this.NextVersionIdMarker = undefined;
        this.inReplayPrefix = false;

        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: {
                genMDParams: this.genMDParamsV0,
                filter: this.filterV0,
                skipping: this.skippingV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                genMDParams: this.genMDParamsV1,
                filter: this.filterV1,
                skipping: this.skippingV1,
            },
        }[this.vFormat]);
    }

    genMDParamsV0() {
        const params = {};
        if (this.parameters.prefix) {
            params.gte = this.parameters.prefix;
            params.lt = inc(this.parameters.prefix);
        }
        if (this.parameters.keyMarker) {
            if (params.gte && params.gte > this.parameters.keyMarker) {
                return params;
            }
            delete params.gte;
            if (this.parameters.versionIdMarker) {
                // versionIdMarker should always come with keyMarker
                // but may not be the other way around
                params.gt = this.parameters.keyMarker
                    + VID_SEP
                    + this.parameters.versionIdMarker;
            } else {
                params.gt = inc(this.parameters.keyMarker + VID_SEP);
            }
        }
        return params;
    }
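    // For example, with prefix 'photos/', keyMarker 'photos/cat.jpg' and no
    // versionIdMarker, genMDParamsV0() returns
    //     { lt: inc('photos/'), gt: inc('photos/cat.jpg' + VID_SEP) }
    // i.e. the range restarts just after the last version key of the marker
    // while still stopping at the end of the prefix.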
    genMDParamsV1() {
        // return an array of two listing params sets to ask for
        // synchronized listing of M and V ranges
        const params = [{}, {}];
        if (this.parameters.prefix) {
            params[0].gte = DbPrefixes.Master + this.parameters.prefix;
            params[0].lt = DbPrefixes.Master + inc(this.parameters.prefix);
            params[1].gte = DbPrefixes.Version + this.parameters.prefix;
            params[1].lt = DbPrefixes.Version + inc(this.parameters.prefix);
        } else {
            params[0].gte = DbPrefixes.Master;
            params[0].lt = inc(DbPrefixes.Master); // stop after the last master key
            params[1].gte = DbPrefixes.Version;
            params[1].lt = inc(DbPrefixes.Version); // stop after the last version key
        }
        if (this.parameters.keyMarker) {
            if (params[1].gte <= DbPrefixes.Version + this.parameters.keyMarker) {
                delete params[0].gte;
                delete params[1].gte;
                params[0].gt = DbPrefixes.Master + inc(this.parameters.keyMarker + VID_SEP);
                if (this.parameters.versionIdMarker) {
                    // versionIdMarker should always come with keyMarker
                    // but may not be the other way around
                    params[1].gt = DbPrefixes.Version
                        + this.parameters.keyMarker
                        + VID_SEP
                        + this.parameters.versionIdMarker;
                } else {
                    params[1].gt = DbPrefixes.Version
                        + inc(this.parameters.keyMarker + VID_SEP);
                }
            }
        }
        return params;
    }

    /**
     * Used to synchronize listing of M and V prefixes by object key
     *
     * @param {object} masterObj object listed from first range
     * returned by genMDParamsV1() (the master keys range)
     * @param {object} versionObj object listed from second range
     * returned by genMDParamsV1() (the version keys range)
     * @return {number} comparison result:
     *   * -1 if master key < version key
     *   * 1 if master key > version key
     */
    compareObjects(masterObj, versionObj) {
        const masterKey = masterObj.key.slice(DbPrefixes.Master.length);
        const versionKey = versionObj.key.slice(DbPrefixes.Version.length);
        return masterKey < versionKey ? -1 : 1;
    }

    /**
     * Add a (key, versionId, value) tuple to the listing.
     * Set the NextMarker to the current key
     * Increment the keys counter
     * @param {object} obj - the entry to add to the listing result
     * @param {String} obj.key - The key to add
     * @param {String} obj.versionId - versionId
     * @param {String} obj.value - The value of the key
     * @return {Boolean} - indicates if iteration should continue
     */
    addContents(obj) {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        this.Contents.push({
            key: obj.key,
            value: this.trimMetadata(obj.value),
            versionId: obj.versionId,
        });
        this.NextMarker = obj.key;
        this.NextVersionIdMarker = obj.versionId;
        ++this.keys;
        return FILTER_ACCEPT;
    }

    /**
     * Filter to apply on each iteration if bucket is in v0
     * versioning key format, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filterV0(obj) {
        if (obj.key.startsWith(DbPrefixes.Replay)) {
            this.inReplayPrefix = true;
            return FILTER_SKIP;
        }
        this.inReplayPrefix = false;

        if (Version.isPHD(obj.value)) {
            // return accept to avoid skipping the next values in range
            return FILTER_ACCEPT;
        }
        return this.filterCommon(obj.key, obj.value);
    }

    /**
     * Filter to apply on each iteration if bucket is in v1
     * versioning key format, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filterV1(obj) {
        if (Version.isPHD(obj.value)) {
            // return accept to avoid skipping the next values in range
            return FILTER_ACCEPT;
        }
        // this function receives both M and V keys, but their prefix
        // length is the same so we can remove their prefix without
        // looking at the type of key
        return this.filterCommon(obj.key.slice(DbPrefixes.Master.length),
            obj.value);
    }

    filterCommon(key, value) {
        if (this.prefix && !key.startsWith(this.prefix)) {
            return FILTER_SKIP;
        }
        let nonversionedKey;
        let versionId = undefined;
        const versionIdIndex = key.indexOf(VID_SEP);
        if (versionIdIndex < 0) {
            nonversionedKey = key;
            this.masterKey = key;
            this.masterVersionId =
                Version.from(value).getVersionId() || 'null';
            versionId = this.masterVersionId;
        } else {
            nonversionedKey = key.slice(0, versionIdIndex);
            versionId = key.slice(versionIdIndex + 1);
            // skip a version key if it is the master version
            if (this.masterKey === nonversionedKey && this.masterVersionId === versionId) {
                return FILTER_SKIP;
            }
            this.masterKey = undefined;
            this.masterVersionId = undefined;
        }
        if (this.delimiter) {
            const baseIndex = this.prefix ? this.prefix.length : 0;
            const delimiterIndex = nonversionedKey.indexOf(this.delimiter, baseIndex);
            if (delimiterIndex >= 0) {
                return this.addCommonPrefix(nonversionedKey, delimiterIndex);
            }
        }
        return this.addContents({ key: nonversionedKey, value, versionId });
    }

    skippingV0() {
        if (this.inReplayPrefix) {
            return DbPrefixes.Replay;
        }
        if (this.NextMarker) {
            const index = this.NextMarker.lastIndexOf(this.delimiter);
            if (index === this.NextMarker.length - 1) {
                return this.NextMarker;
            }
        }
        return SKIP_NONE;
    }

    skippingV1() {
        const skipV0 = this.skippingV0();
        if (skipV0 === SKIP_NONE) {
            return SKIP_NONE;
        }
        // skip to the same object key in both M and V range listings
        return [DbPrefixes.Master + skipV0,
            DbPrefixes.Version + skipV0];
    }

    /**
     * Return an object containing all mandatory fields to use once the
     * iteration is done, doesn't show a NextMarker field if the output
     * isn't truncated
     * @return {Object} - following amazon format
     */
    result() {
        /* NextMarker is only provided when delimiter is used.
         * specified in v1 listing documentation
         * http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
         */
        return {
            CommonPrefixes: this.CommonPrefixes,
            Versions: this.Contents,
            IsTruncated: this.IsTruncated,
            NextKeyMarker: this.IsTruncated ? this.NextMarker : undefined,
            NextVersionIdMarker: this.IsTruncated ?
                this.NextVersionIdMarker : undefined,
            Delimiter: this.delimiter,
        };
    }
}

module.exports = { DelimiterVersions };
@@ -0,0 +1,535 @@
'use strict'; // eslint-disable-line strict

const Extension = require('./Extension').default;

import {
    FilterState,
    FilterReturnValue,
} from './delimiter';

const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } =
    require('./tools');

const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

export const enum DelimiterVersionsFilterStateId {
    NotSkipping = 1,
    SkippingPrefix = 2,
    SkippingVersions = 3,
};

export interface DelimiterVersionsFilterState_NotSkipping extends FilterState {
    id: DelimiterVersionsFilterStateId.NotSkipping,
};

export interface DelimiterVersionsFilterState_SkippingPrefix extends FilterState {
    id: DelimiterVersionsFilterStateId.SkippingPrefix,
    prefix: string;
};

export interface DelimiterVersionsFilterState_SkippingVersions extends FilterState {
    id: DelimiterVersionsFilterStateId.SkippingVersions,
    gt: string;
};

type KeyHandler = (key: string, versionId: string | undefined, value: string) => FilterReturnValue;

type ResultObject = {
    CommonPrefixes: string[],
    Versions: {
        key: string;
        value: string;
        versionId: string;
    }[];
    IsTruncated: boolean;
    Delimiter?: string;
    NextKeyMarker?: string;
    NextVersionIdMarker?: string;
};

type GenMDParamsItem = {
    gt?: string,
    gte?: string,
    lt?: string,
};

/**
 * Handle object listing with parameters
 *
 * @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
 * @prop {String[]} Contents - 'files' to list
 * @prop {Boolean} IsTruncated - truncated listing flag
 * @prop {String|undefined} NextMarker - marker per amazon format
 * @prop {Number} keys - count of listed keys
 * @prop {String|undefined} delimiter - separator per amazon format
 * @prop {String|undefined} prefix - prefix per amazon format
 * @prop {Number} maxKeys - number of keys to list
 */
export class DelimiterVersions extends Extension {

    state: FilterState;
    keyHandlers: { [id: number]: KeyHandler };

    constructor(parameters, logger, vFormat) {
        super(parameters, logger);
        // original listing parameters
        this.delimiter = parameters.delimiter;
        this.prefix = parameters.prefix;
        this.maxKeys = parameters.maxKeys || 1000;
        // specific to version listing
        this.keyMarker = parameters.keyMarker;
        this.versionIdMarker = parameters.versionIdMarker;
        // internal state
        this.masterKey = undefined;
        this.masterVersionId = undefined;
        this.nullKey = null;
        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
        // listing results
        this.CommonPrefixes = [];
        this.Versions = [];
        this.IsTruncated = false;
        this.nextKeyMarker = parameters.keyMarker;
        this.nextVersionIdMarker = undefined;

        this.keyHandlers = {};

        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: {
                genMDParams: this.genMDParamsV0,
                getObjectKey: this.getObjectKeyV0,
                skipping: this.skippingV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                genMDParams: this.genMDParamsV1,
                getObjectKey: this.getObjectKeyV1,
                skipping: this.skippingV1,
            },
        }[this.vFormat]);

        if (this.vFormat === BucketVersioningKeyFormat.v0) {
            this.setKeyHandler(
                DelimiterVersionsFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingV0.bind(this));
        } else {
            this.setKeyHandler(
                DelimiterVersionsFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingV1.bind(this));
        }
        this.setKeyHandler(
            DelimiterVersionsFilterStateId.SkippingPrefix,
            this.keyHandler_SkippingPrefix.bind(this));

        this.setKeyHandler(
            DelimiterVersionsFilterStateId.SkippingVersions,
            this.keyHandler_SkippingVersions.bind(this));

        if (this.versionIdMarker) {
            this.state = <DelimiterVersionsFilterState_SkippingVersions> {
                id: DelimiterVersionsFilterStateId.SkippingVersions,
                gt: `${this.keyMarker}${VID_SEP}${this.versionIdMarker}`,
            };
        } else {
            this.state = <DelimiterVersionsFilterState_NotSkipping> {
                id: DelimiterVersionsFilterStateId.NotSkipping,
            };
        }
    }

    genMDParamsV0() {
        const params: GenMDParamsItem = {};
|
||||||
|
if (this.prefix) {
|
||||||
|
params.gte = this.prefix;
|
||||||
|
params.lt = inc(this.prefix);
|
||||||
|
}
|
||||||
|
if (this.keyMarker && this.delimiter) {
|
||||||
|
const commonPrefix = this.getCommonPrefix(this.keyMarker);
|
||||||
|
if (commonPrefix) {
|
||||||
|
const afterPrefix = inc(commonPrefix);
|
||||||
|
if (!params.gte || afterPrefix > params.gte) {
|
||||||
|
params.gte = afterPrefix;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (this.keyMarker && (!params.gte || this.keyMarker >= params.gte)) {
|
||||||
|
delete params.gte;
|
||||||
|
if (this.versionIdMarker) {
|
||||||
|
// start from the beginning of versions so we can
|
||||||
|
// check if there's a null key and fetch it
|
||||||
|
// (afterwards, we can skip the rest of versions until
|
||||||
|
// we reach versionIdMarker)
|
||||||
|
params.gte = `${this.keyMarker}${VID_SEP}`;
|
||||||
|
} else {
|
||||||
|
params.gt = `${this.keyMarker}${inc(VID_SEP)}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return params;
|
||||||
|
}
|
||||||
|
|
||||||
|
genMDParamsV1() {
|
||||||
|
// return an array of two listing params sets to ask for
|
||||||
|
// synchronized listing of M and V ranges
|
||||||
|
const v0Params: GenMDParamsItem = this.genMDParamsV0();
|
||||||
|
const mParams: GenMDParamsItem = {};
|
||||||
|
const vParams: GenMDParamsItem = {};
|
||||||
|
if (v0Params.gt) {
|
||||||
|
mParams.gt = `${DbPrefixes.Master}${v0Params.gt}`;
|
||||||
|
vParams.gt = `${DbPrefixes.Version}${v0Params.gt}`;
|
||||||
|
} else if (v0Params.gte) {
|
||||||
|
mParams.gte = `${DbPrefixes.Master}${v0Params.gte}`;
|
||||||
|
vParams.gte = `${DbPrefixes.Version}${v0Params.gte}`;
|
||||||
|
} else {
|
||||||
|
mParams.gte = DbPrefixes.Master;
|
||||||
|
vParams.gte = DbPrefixes.Version;
|
||||||
|
}
|
||||||
|
if (v0Params.lt) {
|
||||||
|
mParams.lt = `${DbPrefixes.Master}${v0Params.lt}`;
|
||||||
|
vParams.lt = `${DbPrefixes.Version}${v0Params.lt}`;
|
||||||
|
} else {
|
||||||
|
mParams.lt = inc(DbPrefixes.Master);
|
||||||
|
vParams.lt = inc(DbPrefixes.Version);
|
||||||
|
}
|
||||||
|
return [mParams, vParams];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* check if the max keys count has been reached and set the
|
||||||
|
* final state of the result if it is the case
|
||||||
|
* @return {Boolean} - indicates if the iteration has to stop
|
||||||
|
*/
|
||||||
|
_reachedMaxKeys(): boolean {
|
||||||
|
if (this.keys >= this.maxKeys) {
|
||||||
|
// In cases of maxKeys <= 0 -> IsTruncated = false
|
||||||
|
this.IsTruncated = this.maxKeys > 0;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Used to synchronize listing of M and V prefixes by object key
|
||||||
|
*
|
||||||
|
* @param {object} masterObj object listed from first range
|
||||||
|
* returned by genMDParamsV1() (the master keys range)
|
||||||
|
* @param {object} versionObj object listed from second range
|
||||||
|
* returned by genMDParamsV1() (the version keys range)
|
||||||
|
* @return {number} comparison result:
|
||||||
|
* * -1 if master key < version key
|
||||||
|
* * 1 if master key > version key
|
||||||
|
*/
|
||||||
|
compareObjects(masterObj, versionObj) {
|
||||||
|
const masterKey = masterObj.key.slice(DbPrefixes.Master.length);
|
||||||
|
const versionKey = versionObj.key.slice(DbPrefixes.Version.length);
|
||||||
|
return masterKey < versionKey ? -1 : 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse a listing key into its nonversioned key and version ID components
|
||||||
|
*
|
||||||
|
* @param {string} key - full listing key
|
||||||
|
* @return {object} obj
|
||||||
|
* @return {string} obj.key - nonversioned part of key
|
||||||
|
* @return {string} [obj.versionId] - version ID in the key
|
||||||
|
*/
|
||||||
|
parseKey(fullKey: string): { key: string, versionId ?: string } {
|
||||||
|
const versionIdIndex = fullKey.indexOf(VID_SEP);
|
||||||
|
if (versionIdIndex === -1) {
|
||||||
|
return { key: fullKey };
|
||||||
|
}
|
||||||
|
const nonversionedKey: string = fullKey.slice(0, versionIdIndex);
|
||||||
|
let versionId: string = fullKey.slice(versionIdIndex + 1);
|
||||||
|
return { key: nonversionedKey, versionId };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Include a key in the listing output, in the Versions or CommonPrefix result
|
||||||
|
*
|
||||||
|
* @param {string} key - key (without version ID)
|
||||||
|
* @param {string} versionId - version ID
|
||||||
|
* @param {string} value - metadata value
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
addKey(key: string, versionId: string, value: string) {
|
||||||
|
// add the subprefix to the common prefixes if the key has the delimiter
|
||||||
|
const commonPrefix = this.getCommonPrefix(key);
|
||||||
|
if (commonPrefix) {
|
||||||
|
this.addCommonPrefix(commonPrefix);
|
||||||
|
// transition into SkippingPrefix state to skip all following keys
|
||||||
|
// while they start with the same prefix
|
||||||
|
this.setState(<DelimiterVersionsFilterState_SkippingPrefix> {
|
||||||
|
id: DelimiterVersionsFilterStateId.SkippingPrefix,
|
||||||
|
prefix: commonPrefix,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
this.addVersion(key, versionId, value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add a (key, versionId, value) tuple to the listing.
|
||||||
|
* Set the NextMarker to the current key
|
||||||
|
* Increment the keys counter
|
||||||
|
* @param {String} key - The key to add
|
||||||
|
* @param {String} versionId - versionId
|
||||||
|
* @param {String} value - The value of the key
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
addVersion(key: string, versionId: string, value: string) {
|
||||||
|
this.Versions.push({
|
||||||
|
key,
|
||||||
|
versionId,
|
||||||
|
value: this.trimMetadata(value),
|
||||||
|
});
|
||||||
|
this.nextKeyMarker = key;
|
||||||
|
this.nextVersionIdMarker = versionId;
|
||||||
|
++this.keys;
|
||||||
|
}
|
||||||
|
|
||||||
|
getCommonPrefix(key: string): string | undefined {
|
||||||
|
if (!this.delimiter) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
const baseIndex = this.prefix ? this.prefix.length : 0;
|
||||||
|
const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
|
||||||
|
if (delimiterIndex === -1) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
return key.substring(0, delimiterIndex + this.delimiter.length);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add a Common Prefix in the list
|
||||||
|
* @param {String} commonPrefix - common prefix to add
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
addCommonPrefix(commonPrefix: string): void {
|
||||||
|
// add the new prefix to the list
|
||||||
|
this.CommonPrefixes.push(commonPrefix);
|
||||||
|
++this.keys;
|
||||||
|
this.nextKeyMarker = commonPrefix;
|
||||||
|
this.nextVersionIdMarker = undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cache the current null key, to save it for outputting it later at
|
||||||
|
* the correct position
|
||||||
|
*
|
||||||
|
* @param {String} key - nonversioned key of the null key
|
||||||
|
* @param {String} versionId - real version ID of the null key
|
||||||
|
* @param {String} value - value of the null key
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
cacheNullKey(key: string, versionId: string, value: string): void {
|
||||||
|
this.nullKey = { key, versionId, value };
|
||||||
|
}
|
||||||
|
|
||||||
|
getObjectKeyV0(obj: { key: string }): string {
|
||||||
|
return obj.key;
|
||||||
|
}
|
||||||
|
|
||||||
|
getObjectKeyV1(obj: { key: string }): string {
|
||||||
|
return obj.key.slice(DbPrefixes.Master.length);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Filter to apply on each iteration, based on:
|
||||||
|
* - prefix
|
||||||
|
* - delimiter
|
||||||
|
* - maxKeys
|
||||||
|
* The marker is being handled directly by levelDB
|
||||||
|
* @param {Object} obj - The key and value of the element
|
||||||
|
* @param {String} obj.key - The key of the element
|
||||||
|
* @param {String} obj.value - The value of the element
|
||||||
|
* @return {number} - indicates if iteration should continue
|
||||||
|
*/
|
||||||
|
filter(obj: { key: string, value: string }): FilterReturnValue {
|
||||||
|
const key = this.getObjectKey(obj);
|
||||||
|
const value = obj.value;
|
||||||
|
|
||||||
|
const { key: nonversionedKey, versionId: keyVersionId } = this.parseKey(key);
|
||||||
|
if (this.nullKey) {
|
||||||
|
if (this.nullKey.key !== nonversionedKey
|
||||||
|
|| this.nullKey.versionId < <string> keyVersionId) {
|
||||||
|
this.handleKey(
|
||||||
|
this.nullKey.key, this.nullKey.versionId, this.nullKey.value);
|
||||||
|
this.nullKey = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (keyVersionId === '') {
|
||||||
|
// null key
|
||||||
|
this.cacheNullKey(nonversionedKey, Version.from(value).getVersionId(), value);
|
||||||
|
if (this.state.id === DelimiterVersionsFilterStateId.SkippingVersions) {
|
||||||
|
return FILTER_SKIP;
|
||||||
|
}
|
||||||
|
return FILTER_ACCEPT;
|
||||||
|
}
|
||||||
|
return this.handleKey(nonversionedKey, keyVersionId, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
setState(state: FilterState): void {
|
||||||
|
this.state = state;
|
||||||
|
}
|
||||||
|
|
||||||
|
setKeyHandler(stateId: number, keyHandler: KeyHandler): void {
|
||||||
|
this.keyHandlers[stateId] = keyHandler;
|
||||||
|
}
|
||||||
|
|
||||||
|
handleKey(key: string, versionId: string | undefined, value: string): FilterReturnValue {
|
||||||
|
return this.keyHandlers[this.state.id](key, versionId, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
keyHandler_NotSkippingV0(key: string, versionId: string | undefined, value: string): FilterReturnValue {
|
||||||
|
if (key.startsWith(DbPrefixes.Replay)) {
|
||||||
|
// skip internal replay prefix entirely
|
||||||
|
this.setState(<DelimiterVersionsFilterState_SkippingPrefix> {
|
||||||
|
id: DelimiterVersionsFilterStateId.SkippingPrefix,
|
||||||
|
prefix: DbPrefixes.Replay,
|
||||||
|
});
|
||||||
|
return FILTER_SKIP;
|
||||||
|
}
|
||||||
|
if (Version.isPHD(value)) {
|
||||||
|
return FILTER_ACCEPT;
|
||||||
|
}
|
||||||
|
return this.filter_onNewKey(key, versionId, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
keyHandler_NotSkippingV1(key: string, versionId: string | undefined, value: string): FilterReturnValue {
|
||||||
|
// NOTE: this check on PHD is only useful for Artesca, S3C
|
||||||
|
// does not use PHDs in V1 format
|
||||||
|
if (Version.isPHD(value)) {
|
||||||
|
return FILTER_ACCEPT;
|
||||||
|
}
|
||||||
|
return this.filter_onNewKey(key, versionId, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
filter_onNewKey(key: string, versionId: string | undefined, value: string): FilterReturnValue {
|
||||||
|
if (this._reachedMaxKeys()) {
|
||||||
|
return FILTER_END;
|
||||||
|
}
|
||||||
|
if (versionId === undefined) {
|
||||||
|
this.masterKey = key;
|
||||||
|
this.masterVersionId = Version.from(value).getVersionId() || 'null';
|
||||||
|
this.addKey(this.masterKey, this.masterVersionId, value);
|
||||||
|
} else {
|
||||||
|
if (this.masterKey === key && this.masterVersionId === versionId) {
|
||||||
|
// do not add a version key if it is the master version
|
||||||
|
return FILTER_ACCEPT;
|
||||||
|
}
|
||||||
|
this.addKey(key, versionId, value);
|
||||||
|
}
|
||||||
|
return FILTER_ACCEPT;
|
||||||
|
}
|
||||||
|
|
||||||
|
keyHandler_SkippingPrefix(key: string, versionId: string | undefined, value: string): FilterReturnValue {
|
||||||
|
const { prefix } = <DelimiterVersionsFilterState_SkippingPrefix> this.state;
|
||||||
|
if (key.startsWith(prefix)) {
|
||||||
|
return FILTER_SKIP;
|
||||||
|
}
|
||||||
|
this.setState(<DelimiterVersionsFilterState_NotSkipping> {
|
||||||
|
id: DelimiterVersionsFilterStateId.NotSkipping,
|
||||||
|
});
|
||||||
|
return this.handleKey(key, versionId, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
keyHandler_SkippingVersions(key: string, versionId: string | undefined, value: string): FilterReturnValue {
|
||||||
|
if (key === this.keyMarker) {
|
||||||
|
// since the nonversioned key equals the marker, there is
|
||||||
|
// necessarily a versionId in this key
|
||||||
|
const _versionId = <string> versionId;
|
||||||
|
if (_versionId < this.versionIdMarker) {
|
||||||
|
// skip all versions until marker
|
||||||
|
return FILTER_SKIP;
|
||||||
|
}
|
||||||
|
if (_versionId === this.versionIdMarker) {
|
||||||
|
// nothing left to skip, so return ACCEPT, but don't add this version
|
||||||
|
return FILTER_ACCEPT;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this.setState(<DelimiterVersionsFilterState_NotSkipping> {
|
||||||
|
id: DelimiterVersionsFilterStateId.NotSkipping,
|
||||||
|
});
|
||||||
|
return this.handleKey(key, versionId, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
skippingBase(): string | undefined {
|
||||||
|
switch (this.state.id) {
|
||||||
|
case DelimiterVersionsFilterStateId.SkippingPrefix:
|
||||||
|
const { prefix } = <DelimiterVersionsFilterState_SkippingPrefix> this.state;
|
||||||
|
return inc(prefix);
|
||||||
|
|
||||||
|
case DelimiterVersionsFilterStateId.SkippingVersions:
|
||||||
|
const { gt } = <DelimiterVersionsFilterState_SkippingVersions> this.state;
|
||||||
|
// the contract of skipping() is to return the first key
|
||||||
|
// that can be skipped to, so adding a null byte to skip
|
||||||
|
// over the existing versioned key set in 'gt'
|
||||||
|
return `${gt}\0`;
|
||||||
|
|
||||||
|
default:
|
||||||
|
return SKIP_NONE;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
skippingV0() {
|
||||||
|
return this.skippingBase();
|
||||||
|
}
|
||||||
|
|
||||||
|
skippingV1() {
|
||||||
|
const skipTo = this.skippingBase();
|
||||||
|
if (skipTo === SKIP_NONE) {
|
||||||
|
return SKIP_NONE;
|
||||||
|
}
|
||||||
|
// skip to the same object key in both M and V range listings
|
||||||
|
return [
|
||||||
|
`${DbPrefixes.Master}${skipTo}`,
|
||||||
|
`${DbPrefixes.Version}${skipTo}`,
|
||||||
|
];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Return an object containing all mandatory fields to use once the
|
||||||
|
* iteration is done, doesn't show a NextMarker field if the output
|
||||||
|
* isn't truncated
|
||||||
|
* @return {Object} - following amazon format
|
||||||
|
*/
|
||||||
|
result() {
|
||||||
|
// Add the last null key if still in cache (when it is the
|
||||||
|
// last version of the last key)
|
||||||
|
//
|
||||||
|
// NOTE: _reachedMaxKeys sets IsTruncated to true when it
|
||||||
|
// returns true. Here we want this because either:
|
||||||
|
//
|
||||||
|
// - we did not reach the max keys yet so the result is not
|
||||||
|
// - truncated, and there is still room for the null key in
|
||||||
|
// - the results
|
||||||
|
//
|
||||||
|
// - OR we reached it already while having to process a new
|
||||||
|
// key (so the result is truncated even without the null key)
|
||||||
|
//
|
||||||
|
// - OR we are *just* below the limit but the null key to add
|
||||||
|
// does not fit, so we know the result is now truncated
|
||||||
|
// because there remains the null key to be output.
|
||||||
|
//
|
||||||
|
if (this.nullKey) {
|
||||||
|
this.handleKey(this.nullKey.key, this.nullKey.versionId, this.nullKey.value);
|
||||||
|
}
|
||||||
|
const result: ResultObject = {
|
||||||
|
CommonPrefixes: this.CommonPrefixes,
|
||||||
|
Versions: this.Versions,
|
||||||
|
IsTruncated: this.IsTruncated,
|
||||||
|
};
|
||||||
|
if (this.delimiter) {
|
||||||
|
result.Delimiter = this.delimiter;
|
||||||
|
}
|
||||||
|
if (this.IsTruncated) {
|
||||||
|
result.NextKeyMarker = this.nextKeyMarker;
|
||||||
|
if (this.nextVersionIdMarker) {
|
||||||
|
result.NextVersionIdMarker = this.nextVersionIdMarker;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = { DelimiterVersions };
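Note: below is a rough sketch of how a metadata listing backend might drive the new TypeScript DelimiterVersions extension above. The logger object, the rawEntries array and the exact wiring are assumptions for illustration; FILTER_END and BucketVersioningKeyFormat come from ./tools and the versioning constants as in the file itself.

// Sketch only: feed raw { key, value } entries through the extension and
// stop as soon as it reports that the key limit has been reached.
const listing = new DelimiterVersions(
    { delimiter: '/', maxKeys: 1000 },       // listing parameters (assumed shape)
    logger,                                   // any werelogs-compatible logger
    BucketVersioningKeyFormat.v0,
);

for (const entry of rawEntries) {             // rawEntries: [{ key, value }, ...]
    if (listing.filter(entry) === FILTER_END) {
        break;                                // maxKeys reached, stop iterating
    }
}

// Amazon-style output: NextKeyMarker/NextVersionIdMarker only when truncated.
const { Versions, CommonPrefixes, IsTruncated } = listing.result();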
@ -6,4 +6,7 @@ module.exports = {
     DelimiterMaster: require('./delimiterMaster')
         .DelimiterMaster,
     MPU: require('./MPU').MultipartUploads,
+    DelimiterCurrent: require('./delimiterCurrent').DelimiterCurrent,
+    DelimiterNonCurrent: require('./delimiterNonCurrent').DelimiterNonCurrent,
+    DelimiterOrphanDeleteMarker: require('./delimiterOrphanDeleteMarker').DelimiterOrphanDeleteMarker,
 };
@ -52,21 +52,21 @@ class Skip
         assert(this.skipRangeCb);

         const filteringResult = this.extension.filter(entry);
-        const skippingRange = this.extension.skipping();
+        const skipTo = this.extension.skipping();

         if (filteringResult === FILTER_END) {
             this.listingEndCb();
         } else if (filteringResult === FILTER_SKIP
-            && skippingRange !== SKIP_NONE) {
+            && skipTo !== SKIP_NONE) {
             if (++this.streakLength >= MAX_STREAK_LENGTH) {
                 let newRange;
-                if (Array.isArray(skippingRange)) {
+                if (Array.isArray(skipTo)) {
                     newRange = [];
-                    for (let i = 0; i < skippingRange.length; ++i) {
-                        newRange.push(this._inc(skippingRange[i]));
+                    for (let i = 0; i < skipTo.length; ++i) {
+                        newRange.push(skipTo[i]);
                     }
                 } else {
-                    newRange = this._inc(skippingRange);
+                    newRange = skipTo;
                 }
                 /* Avoid to loop on the same range again and again. */
                 if (newRange === this.gteParams) {
@ -79,16 +79,6 @@ class Skip
             this.streakLength = 0;
         }
     }

-    _inc(str) {
-        if (!str) {
-            return str;
-        }
-        const lastCharValue = str.charCodeAt(str.length - 1);
-        const lastCharNewValue = String.fromCharCode(lastCharValue + 1);
-
-        return `${str.slice(0, str.length - 1)}${lastCharNewValue}`;
-    }
 }
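Note: with this hunk the contract of the extension's skipping() changes: it now returns the first key (or the [masterKey, versionKey] pair in the V1 format) that the listing can jump to, so the _inc() helper is no longer needed. A minimal sketch of the new contract, assuming an extension instance and SKIP_NONE from the surrounding module:

const skipTo = extension.skipping();
if (skipTo !== SKIP_NONE) {
    // no _inc() anymore: the extension already returns a usable boundary,
    // e.g. inc(prefix) for a skipped prefix or `${gt}\0` for skipped versions
    const newRange = Array.isArray(skipTo) ? [...skipTo] : skipTo;
    // ...restart the listing iterator at newRange (gte semantics assumed)
}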
@ -14,7 +14,7 @@ function vaultSignatureCb(
     err: Error | null,
     authInfo: { message: { body: any } },
     log: Logger,
-    callback: (err: Error | null, data?: any, results?: any, params?: any) => void,
+    callback: (err: Error | null, data?: any, results?: any, params?: any, infos?: any) => void,
     streamingV4Params?: any
 ) {
     // vaultclient API guarantees that it returns:
@ -38,7 +38,9 @@ function vaultSignatureCb(
     }
     // @ts-ignore
     log.addDefaultFields(auditLog);
-    return callback(null, userInfo, authorizationResults, streamingV4Params);
+    return callback(null, userInfo, authorizationResults, streamingV4Params, {
+        accountQuota: info.accountQuota || {},
+    });
 }

 export type AuthV4RequestParams = {
@ -384,4 +386,19 @@ export default class Vault {
             return callback(null, respBody);
         });
     }
+
+    report(log: Logger, callback: (err: Error | null, data?: any) => void) {
+        // call the report function of the client
+        if (!this.client.report) {
+            return callback(null, {});
+        }
+        // @ts-ignore
+        return this.client.report(log.getSerializedUids(), (err: Error | null, obj?: any) => {
+            if (err) {
+                log.debug(`error from ${this.implName}`, { error: err });
+                return callback(err);
+            }
+            return callback(null, obj);
+        });
+    }
 }
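Note: a hypothetical caller of the new report() method added above; vault is an instance of the Vault class and log any werelogs request logger, both assumed for illustration.

vault.report(log, (err, data) => {
    if (err) {
        return log.error('vault report failed', { error: err });
    }
    // data is {} when the backing client exposes no report() of its own
    return log.info('vault report', { data });
});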
@ -163,6 +163,20 @@ function doAuth(
         return cb(errors.InternalError);
     }

+/**
+ * This function will generate a version 4 content-md5 header
+ * It looks at the request path to determine what kind of header encoding is required
+ *
+ * @param path - the request path
+ * @param payload - the request payload to hash
+ */
+function generateContentMD5Header(
+    path: string,
+    payload: string,
+) {
+    const encoding = path && path.startsWith('/_/backbeat/') ? 'hex' : 'base64';
+    return crypto.createHash('md5').update(payload, 'binary').digest(encoding);
+}
 /**
  * This function will generate a version 4 header
  *
@ -175,6 +189,7 @@ function doAuth(
  * @param [proxyPath] - path that gets proxied by reverse proxy
  * @param [sessionToken] - security token if the access/secret keys
  * are temporary credentials from STS
+ * @param [payload] - body of the request if any
  */
 function generateV4Headers(
     request: any,
@ -182,8 +197,9 @@ function generateV4Headers(
     accessKey: string,
     secretKeyValue: string,
     awsService: string,
-    proxyPath: string,
-    sessionToken: string
+    proxyPath?: string,
+    sessionToken?: string,
+    payload?: string,
 ) {
     Object.assign(request, { headers: {} });
     const amzDate = convertUTCtoISO8601(Date.now());
@ -196,7 +212,7 @@ function generateV4Headers(
     const timestamp = amzDate;
     const algorithm = 'AWS4-HMAC-SHA256';

-    let payload = '';
+    payload = payload || '';
     if (request.method === 'POST') {
         payload = queryString.stringify(data, undefined, undefined, {
             encodeURIComponent,
@ -207,6 +223,7 @@ function generateV4Headers(
     request.setHeader('host', request._headers.host);
     request.setHeader('x-amz-date', amzDate);
     request.setHeader('x-amz-content-sha256', payloadChecksum);
+    request.setHeader('content-md5', generateContentMD5Header(request.path, payload));

     if (sessionToken) {
         request.setHeader('x-amz-security-token', sessionToken);
@ -217,6 +234,7 @@ function generateV4Headers(
     .filter(headerName =>
         headerName.startsWith('x-amz-')
         || headerName.startsWith('x-scal-')
+        || headerName === 'content-md5'
         || headerName === 'host',
     ).sort().join(';');
     const params = { request, signedHeaders, payloadChecksum,
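Note: generateV4Headers() now accepts an optional payload and signs a content-md5 header computed by generateContentMD5Header(). A small sketch of the encoding rule it applies; the sample payload is made up for illustration.

import * as crypto from 'crypto';

const payload = '{"hello":"world"}';    // illustrative request body

// backbeat routes get a hex digest, every other path gets base64,
// mirroring the path check in generateContentMD5Header() above
const forBackbeat = crypto.createHash('md5')
    .update(payload, 'binary').digest('hex');
const forOtherRoutes = crypto.createHash('md5')
    .update(payload, 'binary').digest('base64');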
@ -134,7 +134,7 @@ export default class ChainBackend extends BaseBackend {
         }

         const check = (policy) => {
-            const key = (policy.arn || '') + (policy.versionId || '');
+            const key = (policy.arn || '') + (policy.versionId || '') + (policy.action || '');
             if (!policyMap[key] || !policyMap[key].isAllowed) {
                 policyMap[key] = policy;
             }
@ -158,6 +158,12 @@ export default class ChainBackend extends BaseBackend {
             if (policyMap[key].versionId) {
                 policyRes.versionId = policyMap[key].versionId;
             }
+            if (policyMap[key].isImplicit !== undefined) {
+                policyRes.isImplicit = policyMap[key].isImplicit;
+            }
+            if (policyMap[key].action) {
+                policyRes.action = policyMap[key].action;
+            }
             return policyRes;
         });
     }
@ -206,4 +212,22 @@ export default class ChainBackend extends BaseBackend {
             return callback(null, res);
         });
     }
+
+    report(reqUid: string, callback: any) {
+        this._forEachClient((client, done) =>
+            client.report(reqUid, done),
+        (err, res) => {
+            if (err) {
+                return callback(err);
+            }
+            const mergedRes = res.reduce((acc, val) => {
+                Object.keys(val).forEach(k => {
+                    acc[k] = val[k];
+                });
+                return acc;
+            }, {});
+
+            return callback(null, mergedRes);
+        });
+    }
 }
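Note: the new ChainBackend.report() fans the call out to every chained client and shallow-merges the per-client objects. A minimal sketch of that merge with made-up payloads:

const res: Record<string, any>[] = [
    { vaultVersion: '8.1', accounts: 12 },   // illustrative client responses
    { utapiVersion: '7.10' },
];
const mergedRes = res.reduce((acc, val) => {
    Object.keys(val).forEach(k => {
        acc[k] = val[k];                     // later clients overwrite on key collision
    });
    return acc;
}, {} as Record<string, any>);
// mergedRes => { vaultVersion: '8.1', accounts: 12, utapiVersion: '7.10' }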
@ -161,6 +161,10 @@ class InMemoryBackend extends BaseBackend {
         };
         return cb(null, vaultReturnObject);
     }
+
+    report(log: Logger, callback: any) {
+        return callback(null, {});
+    }
 }
@ -0,0 +1,569 @@
|
||||||
|
import cluster, { Worker } from 'cluster';
|
||||||
|
import * as werelogs from 'werelogs';
|
||||||
|
|
||||||
|
import { default as errors } from '../../lib/errors';
|
||||||
|
|
||||||
|
const rpcLogger = new werelogs.Logger('ClusterRPC');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remote procedure calls support between cluster workers.
|
||||||
|
*
|
||||||
|
* When using the cluster module, new processes are forked and are
|
||||||
|
* dispatched workloads, usually HTTP requests. The ClusterRPC module
|
||||||
|
* implements a RPC system to send commands to all cluster worker
|
||||||
|
* processes at once from any particular worker, and retrieve their
|
||||||
|
* individual command results, like a distributed map operation.
|
||||||
|
*
|
||||||
|
* The existing nodejs cluster IPC channel is setup from the primary
|
||||||
|
* to each worker, but not between workers, so there has to be a hop
|
||||||
|
* by the primary.
|
||||||
|
*
|
||||||
|
* How a command is treated:
|
||||||
|
*
|
||||||
|
* - a worker sends a command message to the primary
|
||||||
|
*
|
||||||
|
* - the primary then forwards that command to each existing worker
|
||||||
|
* (including the requestor)
|
||||||
|
*
|
||||||
|
* - each worker then executes the command and returns a result or an
|
||||||
|
* error
|
||||||
|
*
|
||||||
|
* - the primary gathers all workers results into an array
|
||||||
|
*
|
||||||
|
* - finally, the primary dispatches the results array to the original
|
||||||
|
* requesting worker
|
||||||
|
*
|
||||||
|
*
|
||||||
|
* Limitations:
|
||||||
|
*
|
||||||
|
* - The command payload must be serializable, which means that:
|
||||||
|
* - it should not contain circular references
|
||||||
|
* - it should be of a reasonable size to be sent in a single RPC message
|
||||||
|
*
|
||||||
|
* - The "toWorkers" parameter of value "*" targets the set of workers
|
||||||
|
* that are available at the time the command is dispatched. Any new
|
||||||
|
* worker spawned after the command has been dispatched for
|
||||||
|
* processing, but before the command completes, don't execute
|
||||||
|
* the command and hence are not part of the results array.
|
||||||
|
*
|
||||||
|
*
|
||||||
|
* To set it up:
|
||||||
|
*
|
||||||
|
* - On the primary:
|
||||||
|
* if (cluster.isPrimary) {
|
||||||
|
* setupRPCPrimary();
|
||||||
|
* }
|
||||||
|
*
|
||||||
|
* - On the workers:
|
||||||
|
* if (!cluster.isPrimary) {
|
||||||
|
* setupRPCWorker({
|
||||||
|
* handler1: (payload: object, uids: string, callback: HandlerCallback) => void,
|
||||||
|
* handler2: ...
|
||||||
|
* });
|
||||||
|
* }
|
||||||
|
* Handler functions will be passed the command payload, request
|
||||||
|
* serialized uids, and must call the callback when the worker is done
|
||||||
|
* processing the command:
|
||||||
|
* callback(error: Error | null | undefined, result?: any)
|
||||||
|
*
|
||||||
|
* When this setup is done, any worker can start sending commands by calling
|
||||||
|
* the async function sendWorkerCommand().
|
||||||
|
*/
|
||||||
|
|
||||||
|
// exported types
|
||||||
|
|
||||||
|
export type ResultObject = {
|
||||||
|
error: Error | null;
|
||||||
|
result: any;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* saved Promise for sendWorkerCommand
|
||||||
|
*/
|
||||||
|
export type CommandPromise = {
|
||||||
|
resolve: (results?: ResultObject[]) => void;
|
||||||
|
reject: (error: Error) => void;
|
||||||
|
timeout: NodeJS.Timeout | null;
|
||||||
|
};
|
||||||
|
export type HandlerCallback = (error: (Error & { code?: number }) | null | undefined, result?: any) => void;
|
||||||
|
export type HandlerFunction = (payload: object, uids: string, callback: HandlerCallback) => void;
|
||||||
|
export type HandlersMap = {
|
||||||
|
[index: string]: HandlerFunction;
|
||||||
|
};
|
||||||
|
export type PrimaryHandlerFunction = (worker: Worker, payload: object, uids: string, callback: HandlerCallback) => void;
|
||||||
|
export type PrimaryHandlersMap = Record<string, PrimaryHandlerFunction>;
|
||||||
|
|
||||||
|
// private types
|
||||||
|
|
||||||
|
type RPCMessage<T extends string, P> = {
|
||||||
|
type: T;
|
||||||
|
uids: string;
|
||||||
|
payload: P;
|
||||||
|
};
|
||||||
|
|
||||||
|
type RPCCommandMessage = RPCMessage<'cluster-rpc:command', any> & {
|
||||||
|
toWorkers: string;
|
||||||
|
toHandler: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
type MarshalledResultObject = {
|
||||||
|
error: string | null;
|
||||||
|
errorCode?: number;
|
||||||
|
result: any;
|
||||||
|
};
|
||||||
|
|
||||||
|
type RPCCommandResultMessage = RPCMessage<'cluster-rpc:commandResult', MarshalledResultObject>;
|
||||||
|
|
||||||
|
type RPCCommandResultsMessage = RPCMessage<'cluster-rpc:commandResults', {
|
||||||
|
results: MarshalledResultObject[];
|
||||||
|
}>;
|
||||||
|
|
||||||
|
type RPCCommandErrorMessage = RPCMessage<'cluster-rpc:commandError', {
|
||||||
|
error: string;
|
||||||
|
}>;
|
||||||
|
|
||||||
|
interface RPCSetupOptions {
|
||||||
|
/**
|
||||||
|
* As werelogs is not a peerDependency, arsenal and a parent project
|
||||||
|
* might have their own separate versions duplicated in dependencies.
|
||||||
|
* The config are therefore not shared.
|
||||||
|
* Use this to propagate werelogs config to arsenal's ClusterRPC.
|
||||||
|
*/
|
||||||
|
werelogsConfig?: Parameters<typeof werelogs.configure>[0];
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* In primary: store worker IDs that are waiting to be dispatched
|
||||||
|
* their command's results, as a mapping.
|
||||||
|
*/
|
||||||
|
const uidsToWorkerId: {
|
||||||
|
[index: string]: number;
|
||||||
|
} = {};
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* In primary: store worker responses for commands in progress as a
|
||||||
|
* mapping.
|
||||||
|
*
|
||||||
|
* Result objects are 'null' while the worker is still processing the
|
||||||
|
* command. When a worker finishes processing it stores the result as:
|
||||||
|
* {
|
||||||
|
* error: string | null,
|
||||||
|
* result: any
|
||||||
|
* }
|
||||||
|
*/
|
||||||
|
const uidsToCommandResults: {
|
||||||
|
[index: string]: {
|
||||||
|
[index: number]: MarshalledResultObject | null;
|
||||||
|
};
|
||||||
|
} = {};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* In workers: store promise callbacks for commands waiting to be
|
||||||
|
* dispatched, as a mapping.
|
||||||
|
*/
|
||||||
|
const uidsToCommandPromise: {
|
||||||
|
[index: string]: CommandPromise;
|
||||||
|
} = {};
|
||||||
|
|
||||||
|
|
||||||
|
function _isRpcMessage(message) {
|
||||||
|
return (message !== null &&
|
||||||
|
typeof message === 'object' &&
|
||||||
|
typeof message.type === 'string' &&
|
||||||
|
message.type.startsWith('cluster-rpc:'));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Setup cluster RPC system on the primary
|
||||||
|
*
|
||||||
|
* @param {object} [handlers] - mapping of handler names to handler functions
|
||||||
|
* handler function:
|
||||||
|
* `handler({Worker} worker, {object} payload, {string} uids, {function} callback)`
|
||||||
|
* handler callback must be called when worker is done with the command:
|
||||||
|
* `callback({Error|null} error, {any} [result])`
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
export function setupRPCPrimary(handlers?: PrimaryHandlersMap, options?: RPCSetupOptions) {
|
||||||
|
if (options?.werelogsConfig) {
|
||||||
|
werelogs.configure(options.werelogsConfig);
|
||||||
|
}
|
||||||
|
cluster.on('message', (worker, message) => {
|
||||||
|
if (_isRpcMessage(message)) {
|
||||||
|
_handlePrimaryMessage(worker, message, handlers);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Setup RPCs on a cluster worker process
|
||||||
|
*
|
||||||
|
* @param {object} handlers - mapping of handler names to handler functions
|
||||||
|
* handler function:
|
||||||
|
* handler({object} payload, {string} uids, {function} callback)
|
||||||
|
* handler callback must be called when worker is done with the command:
|
||||||
|
* callback({Error|null} error, {any} [result])
|
||||||
|
* @return {undefined}
|
||||||
|
* }
|
||||||
|
*/
|
||||||
|
export function setupRPCWorker(handlers: HandlersMap, options?: RPCSetupOptions) {
|
||||||
|
if (!process.send) {
|
||||||
|
throw new Error('fatal: cannot setup cluster RPC: "process.send" is not available');
|
||||||
|
}
|
||||||
|
if (options?.werelogsConfig) {
|
||||||
|
werelogs.configure(options.werelogsConfig);
|
||||||
|
}
|
||||||
|
process.on('message', (message: RPCCommandMessage | RPCCommandResultsMessage) => {
|
||||||
|
if (_isRpcMessage(message)) {
|
||||||
|
_handleWorkerMessage(message, handlers);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Send a command for workers to execute in parallel, and wait for results
|
||||||
|
*
|
||||||
|
* @param {string} toWorkers - which workers should execute the command
|
||||||
|
* Currently the supported values are:
|
||||||
|
* - "*", meaning all workers will execute the command
|
||||||
|
* - "PRIMARY", meaning primary process will execute the command
|
||||||
|
* @param {string} toHandler - name of handler that will execute the
|
||||||
|
* command in workers, as declared in setupRPCWorker() parameter object
|
||||||
|
* @param {string} uids - unique identifier of the command, must be
|
||||||
|
* unique across all commands in progress
|
||||||
|
* @param {object} payload - message payload, sent as-is to the handler
|
||||||
|
* @param {number} [timeoutMs=60000] - timeout the command with a
|
||||||
|
* "RequestTimeout" error after this number of milliseconds - set to 0
|
||||||
|
* to disable timeouts (the command may then hang forever)
|
||||||
|
* @returns {Promise}
|
||||||
|
*/
|
||||||
|
export async function sendWorkerCommand(
|
||||||
|
toWorkers: string,
|
||||||
|
toHandler: string,
|
||||||
|
uids: string,
|
||||||
|
payload: object,
|
||||||
|
timeoutMs: number = 60000
|
||||||
|
) {
|
||||||
|
if (typeof uids !== 'string') {
|
||||||
|
rpcLogger.error('missing or invalid "uids" field', { uids });
|
||||||
|
throw errors.MissingParameter;
|
||||||
|
}
|
||||||
|
if (uidsToCommandPromise[uids] !== undefined) {
|
||||||
|
rpcLogger.error('a command is already in progress with same uids', { uids });
|
||||||
|
throw errors.OperationAborted;
|
||||||
|
}
|
||||||
|
rpcLogger.info('sending command', { toWorkers, toHandler, uids, payload });
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
let timeout: NodeJS.Timeout | null = null;
|
||||||
|
if (timeoutMs) {
|
||||||
|
timeout = setTimeout(() => {
|
||||||
|
delete uidsToCommandPromise[uids];
|
||||||
|
reject(errors.RequestTimeout);
|
||||||
|
}, timeoutMs);
|
||||||
|
}
|
||||||
|
uidsToCommandPromise[uids] = { resolve, reject, timeout };
|
||||||
|
const message: RPCCommandMessage = {
|
||||||
|
type: 'cluster-rpc:command',
|
||||||
|
toWorkers,
|
||||||
|
toHandler,
|
||||||
|
uids,
|
||||||
|
payload,
|
||||||
|
};
|
||||||
|
return process.send?.(message);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the number of commands in flight
|
||||||
|
* @returns {number}
|
||||||
|
*/
|
||||||
|
export function getPendingCommandsCount() {
|
||||||
|
return Object.keys(uidsToCommandPromise).length;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function _dispatchCommandResultsToWorker(
|
||||||
|
worker: Worker,
|
||||||
|
uids: string,
|
||||||
|
resultsArray: MarshalledResultObject[]
|
||||||
|
): void {
|
||||||
|
const message: RPCCommandResultsMessage = {
|
||||||
|
type: 'cluster-rpc:commandResults',
|
||||||
|
uids,
|
||||||
|
payload: {
|
||||||
|
results: resultsArray,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
worker.send(message);
|
||||||
|
}
|
||||||
|
|
||||||
|
function _dispatchCommandErrorToWorker(
|
||||||
|
worker: Worker,
|
||||||
|
uids: string,
|
||||||
|
error: Error,
|
||||||
|
): void {
|
||||||
|
const message: RPCCommandErrorMessage = {
|
||||||
|
type: 'cluster-rpc:commandError',
|
||||||
|
uids,
|
||||||
|
payload: {
|
||||||
|
error: error.message,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
worker.send(message);
|
||||||
|
}
|
||||||
|
|
||||||
|
function _sendPrimaryCommandResult(
|
||||||
|
worker: Worker,
|
||||||
|
uids: string,
|
||||||
|
error: (Error & { code?: number }) | null | undefined,
|
||||||
|
result?: any
|
||||||
|
): void {
|
||||||
|
const message: RPCCommandResultsMessage = {
|
||||||
|
type: 'cluster-rpc:commandResults',
|
||||||
|
uids,
|
||||||
|
payload: {
|
||||||
|
results: [{ error: error?.message || null, errorCode: error?.code, result }],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
worker.send?.(message);
|
||||||
|
}
|
||||||
|
|
||||||
|
function _handlePrimaryCommandMessage(
|
||||||
|
fromWorker: Worker,
|
||||||
|
logger: any,
|
||||||
|
message: RPCCommandMessage,
|
||||||
|
handlers?: PrimaryHandlersMap
|
||||||
|
): void {
|
||||||
|
const { toWorkers, toHandler, uids, payload } = message;
|
||||||
|
if (toWorkers === '*') {
|
||||||
|
if (uidsToWorkerId[uids] !== undefined) {
|
||||||
|
logger.warn('new command already has a waiting worker with same uids', {
|
||||||
|
uids, workerId: uidsToWorkerId[uids],
|
||||||
|
});
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
const commandResults = {};
|
||||||
|
for (const workerId of Object.keys(cluster.workers || {})) {
|
||||||
|
commandResults[workerId] = null;
|
||||||
|
}
|
||||||
|
uidsToWorkerId[uids] = fromWorker?.id;
|
||||||
|
uidsToCommandResults[uids] = commandResults;
|
||||||
|
|
||||||
|
for (const [workerId, worker] of Object.entries(cluster.workers || {})) {
|
||||||
|
logger.debug('sending command message to worker', {
|
||||||
|
workerId, toHandler, payload,
|
||||||
|
});
|
||||||
|
if (worker) {
|
||||||
|
worker.send(message);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if (toWorkers === 'PRIMARY') {
|
||||||
|
const { toHandler, uids, payload } = message;
|
||||||
|
const cb: HandlerCallback = (err, result) => _sendPrimaryCommandResult(fromWorker, uids, err, result);
|
||||||
|
|
||||||
|
if (toHandler in (handlers || {})) {
|
||||||
|
return handlers![toHandler](fromWorker, payload, uids, cb);
|
||||||
|
}
|
||||||
|
logger.error('no such handler in "toHandler" field from worker command message', {
|
||||||
|
toHandler,
|
||||||
|
});
|
||||||
|
return cb(errors.NotImplemented);
|
||||||
|
} else {
|
||||||
|
logger.error('unsupported "toWorkers" field from worker command message', {
|
||||||
|
toWorkers,
|
||||||
|
});
|
||||||
|
if (fromWorker) {
|
||||||
|
_dispatchCommandErrorToWorker(fromWorker, uids, errors.NotImplemented);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function _handlePrimaryCommandResultMessage(
|
||||||
|
fromWorkerId: number,
|
||||||
|
logger: any,
|
||||||
|
message: RPCCommandResultMessage
|
||||||
|
): void {
|
||||||
|
const { uids, payload } = message;
|
||||||
|
const commandResults = uidsToCommandResults[uids];
|
||||||
|
if (!commandResults) {
|
||||||
|
logger.warn('received command response message from worker for command not in flight', {
|
||||||
|
workerId: fromWorkerId,
|
||||||
|
uids,
|
||||||
|
});
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
if (commandResults[fromWorkerId] === undefined) {
|
||||||
|
logger.warn('received command response message with unexpected worker ID', {
|
||||||
|
workerId: fromWorkerId,
|
||||||
|
uids,
|
||||||
|
});
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
if (commandResults[fromWorkerId] !== null) {
|
||||||
|
logger.warn('ignoring duplicate command response from worker', {
|
||||||
|
workerId: fromWorkerId,
|
||||||
|
uids,
|
||||||
|
});
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
commandResults[fromWorkerId] = payload;
|
||||||
|
const commandResultsArray = Object.values(commandResults);
|
||||||
|
if (commandResultsArray.every(response => response !== null)) {
|
||||||
|
logger.debug('all workers responded to command', { uids });
|
||||||
|
const completeCommandResultsArray = <MarshalledResultObject[]> commandResultsArray;
|
||||||
|
const toWorkerId = uidsToWorkerId[uids];
|
||||||
|
const toWorker = cluster.workers?.[toWorkerId];
|
||||||
|
|
||||||
|
delete uidsToCommandResults[uids];
|
||||||
|
delete uidsToWorkerId[uids];
|
||||||
|
|
||||||
|
if (!toWorker) {
|
||||||
|
logger.warn('worker shut down while its command was executing', {
|
||||||
|
workerId: toWorkerId, uids,
|
||||||
|
});
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
// send back response to original worker
|
||||||
|
_dispatchCommandResultsToWorker(toWorker, uids, completeCommandResultsArray);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function _handlePrimaryMessage(
|
||||||
|
fromWorker: Worker,
|
||||||
|
message: RPCCommandMessage | RPCCommandResultMessage,
|
||||||
|
handlers?: PrimaryHandlersMap
|
||||||
|
): void {
|
||||||
|
const { type: messageType, uids } = message;
|
||||||
|
const logger = rpcLogger.newRequestLoggerFromSerializedUids(uids);
|
||||||
|
logger.debug('primary received message from worker', {
|
||||||
|
workerId: fromWorker?.id, rpcMessage: message,
|
||||||
|
});
|
||||||
|
if (messageType === 'cluster-rpc:command') {
|
||||||
|
return _handlePrimaryCommandMessage(fromWorker, logger, message, handlers);
|
||||||
|
}
|
||||||
|
if (messageType === 'cluster-rpc:commandResult') {
|
||||||
|
return _handlePrimaryCommandResultMessage(fromWorker?.id, logger, message);
|
||||||
|
}
|
||||||
|
logger.error('unsupported message type', {
|
||||||
|
workerId: fromWorker?.id, messageType, uids,
|
||||||
|
});
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
function _sendWorkerCommandResult(
|
||||||
|
uids: string,
|
||||||
|
error: Error | null | undefined,
|
||||||
|
result?: any
|
||||||
|
): void {
|
||||||
|
const message: RPCCommandResultMessage = {
|
||||||
|
type: 'cluster-rpc:commandResult',
|
||||||
|
uids,
|
||||||
|
payload: {
|
||||||
|
error: error ? error.message : null,
|
||||||
|
result,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
process.send?.(message);
|
||||||
|
}
|
||||||
|
|
||||||
|
function _handleWorkerCommandMessage(
|
||||||
|
logger: any,
|
||||||
|
message: RPCCommandMessage,
|
||||||
|
handlers: HandlersMap
|
||||||
|
): void {
|
||||||
|
const { toHandler, uids, payload } = message;
|
||||||
|
const cb: HandlerCallback = (err, result) => _sendWorkerCommandResult(uids, err, result);
|
||||||
|
|
||||||
|
if (toHandler in handlers) {
|
||||||
|
return handlers[toHandler](payload, uids, cb);
|
||||||
|
}
|
||||||
|
logger.error('no such handler in "toHandler" field from worker command message', {
|
||||||
|
toHandler,
|
||||||
|
});
|
||||||
|
return cb(errors.NotImplemented);
|
||||||
|
}
|
||||||
|
|
||||||
|
function _handleWorkerCommandResultsMessage(
|
||||||
|
logger: any,
|
||||||
|
message: RPCCommandResultsMessage,
|
||||||
|
): void {
|
||||||
|
const { uids, payload } = message;
|
||||||
|
const { results } = payload;
|
||||||
|
const commandPromise: CommandPromise = uidsToCommandPromise[uids];
|
||||||
|
if (commandPromise === undefined) {
|
||||||
|
logger.error('missing promise for command results', { uids, payload });
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
if (commandPromise.timeout) {
|
||||||
|
clearTimeout(commandPromise.timeout);
|
||||||
|
}
|
||||||
|
delete uidsToCommandPromise[uids];
|
||||||
|
const unmarshalledResults = results.map(workerResult => {
|
||||||
|
let workerError: Error | null = null;
|
||||||
|
if (workerResult.error) {
|
||||||
|
if (workerResult.error in errors) {
|
||||||
|
workerError = errors[workerResult.error];
|
||||||
|
} else {
|
||||||
|
workerError = new Error(workerResult.error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (workerError && workerResult.errorCode) {
|
||||||
|
(workerError as Error & { code: number }).code = workerResult.errorCode;
|
||||||
|
}
|
||||||
|
const unmarshalledResult: ResultObject = {
|
||||||
|
error: workerError,
|
||||||
|
result: workerResult.result,
|
||||||
|
};
|
||||||
|
return unmarshalledResult;
|
||||||
|
});
|
||||||
|
return commandPromise.resolve(unmarshalledResults);
|
||||||
|
}
|
||||||
|
|
||||||
|
function _handleWorkerCommandErrorMessage(
|
||||||
|
logger: any,
|
||||||
|
message: RPCCommandErrorMessage,
|
||||||
|
): void {
|
||||||
|
const { uids, payload } = message;
|
||||||
|
const { error } = payload;
|
||||||
|
const commandPromise: CommandPromise = uidsToCommandPromise[uids];
|
||||||
|
if (commandPromise === undefined) {
|
||||||
|
logger.error('missing promise for command results', { uids, payload });
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
if (commandPromise.timeout) {
|
||||||
|
clearTimeout(commandPromise.timeout);
|
||||||
|
}
|
||||||
|
delete uidsToCommandPromise[uids];
|
||||||
|
let commandError: Error | null = null;
|
||||||
|
if (error in errors) {
|
||||||
|
commandError = errors[error];
|
||||||
|
} else {
|
||||||
|
commandError = new Error(error);
|
||||||
|
}
|
||||||
|
return commandPromise.reject(<Error> commandError);
|
||||||
|
}
|
||||||
|
|
||||||
|
function _handleWorkerMessage(
|
||||||
|
message: RPCCommandMessage | RPCCommandResultsMessage | RPCCommandErrorMessage,
|
||||||
|
handlers: HandlersMap
|
||||||
|
): void {
|
||||||
|
const { type: messageType, uids } = message;
|
||||||
|
const workerId = cluster.worker?.id;
|
||||||
|
const logger = rpcLogger.newRequestLoggerFromSerializedUids(uids);
|
||||||
|
logger.debug('worker received message from primary', {
|
||||||
|
workerId, rpcMessage: message,
|
||||||
|
});
|
||||||
|
if (messageType === 'cluster-rpc:command') {
|
||||||
|
return _handleWorkerCommandMessage(logger, message, handlers);
|
||||||
|
}
|
||||||
|
if (messageType === 'cluster-rpc:commandResults') {
|
||||||
|
return _handleWorkerCommandResultsMessage(logger, message);
|
||||||
|
}
|
||||||
|
if (messageType === 'cluster-rpc:commandError') {
|
||||||
|
return _handleWorkerCommandErrorMessage(logger, message);
|
||||||
|
}
|
||||||
|
logger.error('unsupported message type', {
|
||||||
|
workerId, messageType,
|
||||||
|
});
|
||||||
|
return undefined;
|
||||||
|
}
|
|
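Note: a minimal end-to-end sketch of wiring the new ClusterRPC module, following the setup described in its header comment; the import path, handler name and payloads are assumptions for illustration.

import cluster from 'cluster';
import { setupRPCPrimary, setupRPCWorker, sendWorkerCommand } from './ClusterRPC';

if (cluster.isPrimary) {
    setupRPCPrimary();
    for (let i = 0; i < 4; i++) {
        cluster.fork();
    }
} else {
    setupRPCWorker({
        // hypothetical handler: each worker reports its pid
        whoAreYou: (payload, uids, callback) => callback(null, { pid: process.pid }),
    });

    // any worker can query all workers; the default timeout is 60s
    sendWorkerCommand('*', 'whoAreYou', `who-${process.pid}`, {})
        .then(results => results?.forEach(r => console.log(r.error, r.result)))
        .catch(err => console.error(err));
}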
@ -141,6 +141,10 @@ export const supportedNotificationEvents = new Set([
     's3:ObjectRestore:Post',
     's3:ObjectRestore:Completed',
     's3:ObjectRestore:Delete',
+    's3:LifecycleTransition',
+    's3:LifecycleExpiration:*',
+    's3:LifecycleExpiration:DeleteMarkerCreated',
+    's3:LifecycleExpiration:Delete',
 ]);
 export const notificationArnPrefix = 'arn:scality:bucketnotif';
 // HTTP server keep-alive timeout is set to a higher value than
@ -167,3 +171,7 @@ export const maxCachedBuckets = process.env.METADATA_MAX_CACHED_BUCKETS ?
     Number(process.env.METADATA_MAX_CACHED_BUCKETS) : 1000;

 export const validRestoreObjectTiers = new Set(['Expedited', 'Standard', 'Bulk']);
+export const maxBatchingConcurrentOperations = 5;
+
+/** For policy resource arn check we allow empty account ID to not break compatibility */
+export const policyArnAllowedEmptyAccountId = ['utapi', 'scuba'];
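Note: a small sketch of how the expanded notification event set might be consulted when validating a bucket notification configuration; the import path and validation site are assumptions, and exact-match lookup only (wildcard entries like 's3:LifecycleExpiration:*' would be handled by the caller).

import { supportedNotificationEvents } from '../constants';

const event = 's3:LifecycleExpiration:DeleteMarkerCreated';
if (!supportedNotificationEvents.has(event)) {
    throw new Error(`unsupported notification event: ${event}`);
}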
@ -148,7 +148,7 @@ export class IndexTransaction {
                 'missing condition for conditional put'
             );
         }
-        if (typeof condition.notExists !== 'string') {
+        if (typeof condition.notExists !== 'string' && typeof condition.exists !== 'string') {
             throw propError(
                 'unsupportedConditionalOperation',
                 'missing key or supported condition'
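Note: the relaxed check now accepts either a notExists or an exists key in a conditional put. A sketch of the two condition shapes that pass the validation above; the call sites that build these objects are not shown in this hunk.

type Condition = { notExists?: string; exists?: string };

const conditions: Condition[] = [
    { notExists: 'some-master-key' },   // put only when the key is absent
    { exists: 'some-master-key' },      // put only when the key is present
];

for (const condition of conditions) {
    if (typeof condition.notExists !== 'string' && typeof condition.exists !== 'string') {
        throw new Error('missing key or supported condition');
    }
}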
@ -690,6 +690,11 @@ export const ReportNotPresent: ErrorFormat = {
         'The request was rejected because the credential report does not exist. To generate a credential report, use GenerateCredentialReport.',
 };

+export const Found: ErrorFormat = {
+    code: 302,
+    description: 'Resource Found'
+};
+
 // ------------- Special non-AWS S3 errors -------------

 export const MPUinProgress: ErrorFormat = {
@ -1037,3 +1042,15 @@ export const AuthMethodNotImplemented: ErrorFormat = {
     description: 'AuthMethodNotImplemented',
     code: 501,
 };
+
+// --------------------- quotaErros ---------------------
+
+export const NoSuchQuota: ErrorFormat = {
+    code: 404,
+    description: 'The specified resource does not have a quota.',
+};
+
+export const QuotaExceeded: ErrorFormat = {
+    code: 429,
+    description: 'The quota set for the resource is exceeded.',
+};
@@ -1,26 +1,19 @@
 import promClient from 'prom-client';
 
-const collectDefaultMetricsIntervalMs =
-    process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS !== undefined ?
-        Number.parseInt(process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS, 10) :
-        10000;
-
-promClient.collectDefaultMetrics({ timeout: collectDefaultMetricsIntervalMs });
-
 export default class ZenkoMetrics {
-    static createCounter(params: promClient.CounterConfiguration) {
+    static createCounter(params: promClient.CounterConfiguration<string>) {
         return new promClient.Counter(params);
     }
 
-    static createGauge(params: promClient.GaugeConfiguration) {
+    static createGauge(params: promClient.GaugeConfiguration<string>) {
         return new promClient.Gauge(params);
     }
 
-    static createHistogram(params: promClient.HistogramConfiguration) {
+    static createHistogram(params: promClient.HistogramConfiguration<string>) {
         return new promClient.Histogram(params);
     }
 
-    static createSummary(params: promClient.SummaryConfiguration) {
+    static createSummary(params: promClient.SummaryConfiguration<string>) {
         return new promClient.Summary(params);
     }
 
@@ -28,11 +21,15 @@ export default class ZenkoMetrics {
         return promClient.register.getSingleMetric(name);
     }
 
-    static asPrometheus() {
+    static async asPrometheus() {
         return promClient.register.metrics();
     }
 
     static asPrometheusContentType() {
         return promClient.register.contentType;
     }
+
+    static collectDefaultMetrics() {
+        return promClient.collectDefaultMetrics();
+    }
 }
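
Call sites stay unchanged except that asPrometheus() is now async. A minimal usage sketch (the import path is hypothetical; prom-client v13+ returns a Promise from register.metrics(), and default metrics collection becomes opt-in):

    import ZenkoMetrics from './ZenkoMetrics'; // hypothetical path

    const requestCounter = ZenkoMetrics.createCounter({
        name: 's3_requests_total',
        help: 'Total number of S3 requests',
        labelNames: ['method'],
    });
    requestCounter.inc({ method: 'PUT' });

    ZenkoMetrics.collectDefaultMetrics(); // opt-in, no longer triggered at module load

    async function scrape(): Promise<string> {
        // register.metrics() is async in prom-client >= 13, hence the await upstream
        return ZenkoMetrics.asPrometheus();
    }
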
@@ -1,3 +1,5 @@
+import { RequestLogger } from 'werelogs';
+
 import { legacyLocations } from '../constants';
 import escapeForXml from '../s3middleware/escapeForXml';

@@ -101,6 +101,7 @@ export default class BucketInfo {
     _azureInfo: any | null;
     _ingestion: { status: 'enabled' | 'disabled' } | null;
     _capabilities?: Capabilities;
+    _quotaMax: number | 0;
 
     /**
      * Represents all bucket information.

@@ -157,6 +158,7 @@ export default class BucketInfo {
     * @param [notificationConfiguration] - bucket notification configuration
     * @param [tags] - bucket tag set
     * @param [capabilities] - capabilities for the bucket
+    * @param quotaMax - bucket quota
     */
    constructor(
        name: string,

@@ -185,6 +187,7 @@ export default class BucketInfo {
        notificationConfiguration?: any,
        tags?: Array<BucketTag> | [],
        capabilities?: Capabilities,
+       quotaMax?: number | 0,
    ) {
        assert.strictEqual(typeof name, 'string');
        assert.strictEqual(typeof owner, 'string');

@@ -285,6 +288,10 @@ export default class BucketInfo {
            tags = [] as BucketTag[];
        }
        assert.strictEqual(areTagsValid(tags), true);
+       if (quotaMax) {
+           assert.strictEqual(typeof quotaMax, 'number');
+           assert(quotaMax >= 0, 'Quota cannot be negative');
+       }
 
        // IF UPDATING PROPERTIES, INCREMENT MODELVERSION NUMBER ABOVE
        this._acl = aclInstance;

@@ -313,6 +320,7 @@ export default class BucketInfo {
        this._notificationConfiguration = notificationConfiguration || null;
        this._tags = tags;
        this._capabilities = capabilities || undefined;
+       this._quotaMax = quotaMax || 0;
        return this;
    }

@@ -348,6 +356,7 @@ export default class BucketInfo {
            notificationConfiguration: this._notificationConfiguration,
            tags: this._tags,
            capabilities: this._capabilities,
+           quotaMax: this._quotaMax,
        };
        const final = this._websiteConfiguration
            ? {

@@ -374,7 +383,7 @@ export default class BucketInfo {
            obj.bucketPolicy, obj.uid, obj.readLocationConstraint, obj.isNFS,
            obj.ingestion, obj.azureInfo, obj.objectLockEnabled,
            obj.objectLockConfiguration, obj.notificationConfiguration, obj.tags,
-           obj.capabilities);
+           obj.capabilities, obj.quotaMax);
    }

    /**

@@ -401,7 +410,8 @@ export default class BucketInfo {
            data._bucketPolicy, data._uid, data._readLocationConstraint,
            data._isNFS, data._ingestion, data._azureInfo,
            data._objectLockEnabled, data._objectLockConfiguration,
-           data._notificationConfiguration, data._tags, data._capabilities);
+           data._notificationConfiguration, data._tags, data._capabilities,
+           data._quotaMax);
    }

    /**

@@ -939,4 +949,22 @@ export default class BucketInfo {
        this._capabilities = capabilities;
        return this;
    }
+
+   /**
+    * Get the bucket quota information
+    * @return quotaMax
+    */
+   getQuota() {
+       return this._quotaMax;
+   }
+
+   /**
+    * Set bucket quota
+    * @param quota - quota to be set
+    * @return - bucket quota info
+    */
+   setQuota(quota: number) {
+       this._quotaMax = quota || 0;
+       return this;
+   }
 }
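
A quick sketch of the new accessors, assuming `bucket` is an existing BucketInfo instance (the quota is stored as a plain byte count, and falsy values normalize to 0):

    bucket.setQuota(5 * 1024 * 1024 * 1024); // 5 GiB quota, stored in bytes
    bucket.getQuota();                       // => 5368709120
    bucket.setQuota(0);                      // 0 (or any falsy value) means "no quota"
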
@@ -666,13 +666,38 @@ export default class LifecycleConfiguration {
      * @return Returns an error object or `null`
      */
     _checkDate(date: string) {
-        const isoRegex = new RegExp('^(-?(?:[1-9][0-9]*)?[0-9]{4})-' +
-            '(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9])' +
-            ':([0-5][0-9]):([0-5][0-9])(.[0-9]+)?(Z)?$');
-        if (!isoRegex.test(date)) {
+        const isoRegex = new RegExp(
+            "^(-?(?:[1-9][0-9]*)?[0-9]{4})" + // Year
+            "-(1[0-2]|0[1-9])" + // Month
+            "-(3[01]|0[1-9]|[12][0-9])" + // Day
+            "T(2[0-3]|[01][0-9])" + // Hour
+            ":([0-5][0-9])" + // Minute
+            ":([0-5][0-9])" + // Second
+            "(\\.[0-9]+)?" + // Fractional second
+            "(Z|[+-][01][0-9]:[0-5][0-9])?$", // Timezone
+            "g"
+        );
+        const matches = [...date.matchAll(isoRegex)];
+        if (matches.length !== 1) {
             const msg = 'Date must be in ISO 8601 format';
             return errors.InvalidArgument.customizeDescription(msg);
         }
+        // Check for a timezone in the last match group. If none, add a Z to indicate UTC.
+        if (!matches[0][matches[0].length-1]) {
+            date += 'Z';
+        }
+        const dateObj = new Date(date);
+        if (Number.isNaN(dateObj.getTime())) {
+            const msg = 'Date is not a valid date';
+            return errors.InvalidArgument.customizeDescription(msg);
+        }
+        if (dateObj.getUTCHours() !== 0
+            || dateObj.getUTCMinutes() !== 0
+            || dateObj.getUTCSeconds() !== 0
+            || dateObj.getUTCMilliseconds() !== 0) {
+            const msg = '\'Date\' must be at midnight GMT';
+            return errors.InvalidArgument.customizeDescription(msg);
+        }
         return null;
     }
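
To illustrate the stricter validation, a hedged sketch of what the rewritten check accepts and rejects (assuming `config` is a LifecycleConfiguration instance; an explicit offset is normalized to UTC before the midnight check):

    config._checkDate('2024-03-01T00:00:00.000Z');  // null: midnight UTC
    config._checkDate('2024-03-01T02:00:00+02:00'); // null: offset resolves to midnight UTC
    config._checkDate('2024-03-01T12:00:00Z');      // InvalidArgument: not midnight GMT
    config._checkDate('2024-13-01T00:00:00Z');      // InvalidArgument: not ISO 8601
    config._checkDate('2024-02-30T00:00:00Z');      // InvalidArgument: not a valid date
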
@@ -844,6 +869,7 @@ export default class LifecycleConfiguration {
      *          days: <value>,
      *          date: <value>,
      *          deleteMarker: <value>
+     *          newerNoncurrentVersions: <value>,
      *      },
      *      ],
      *  }

@@ -856,7 +882,8 @@ export default class LifecycleConfiguration {
             actionName: string;
             days?: number;
             date?: number;
-            deleteMarker?: boolean
+            deleteMarker?: boolean;
+            newerNoncurrentVersions?: number
         }[];
     } = {
         propName: 'actions',

@@ -885,8 +912,14 @@ export default class LifecycleConfiguration {
             if (action.error) {
                 actionsObj.error = action.error;
             } else {
-                const actionTimes = ['days', 'date', 'deleteMarker',
-                    'transition', 'nonCurrentVersionTransition'];
+                const actionTimes = [
+                    'days',
+                    'date',
+                    'deleteMarker',
+                    'transition',
+                    'nonCurrentVersionTransition',
+                    'newerNoncurrentVersions'
+                ];
                 actionTimes.forEach(t => {
                     if (action[t]) {
                         // eslint-disable-next-line no-param-reassign

@@ -1032,6 +1065,7 @@ export default class LifecycleConfiguration {
      * nvExpObj = {
      *      error: <error>,
      *      days: <value>,
+     *      newerNoncurrentVersions: <value>,
      * }
      */
     _parseNoncurrentVersionExpiration(rule: any) {

@@ -1042,14 +1076,41 @@ export default class LifecycleConfiguration {
                 'NoncurrentDays');
             return { error };
         }
 
+        const actionParams: {
+            error?: ArsenalError;
+            days: number;
+            newerNoncurrentVersions: number;
+        } = {
+            days: 0,
+            newerNoncurrentVersions: 0,
+        };
+
         const daysInt = parseInt(subNVExp.NoncurrentDays[0], 10);
         if (daysInt < 1) {
             const msg = 'NoncurrentDays is not a positive integer';
             const error = errors.InvalidArgument.customizeDescription(msg);
             return { error };
         } else {
-            return { days: daysInt };
+            actionParams.days = daysInt;
         }
+
+        if (subNVExp.NewerNoncurrentVersions) {
+            const newerVersionsInt = parseInt(subNVExp.NewerNoncurrentVersions[0], 10);
+
+            if (Number.isNaN(newerVersionsInt) || newerVersionsInt < 1) {
+                const msg = 'NewerNoncurrentVersions is not a positive integer';
+                const error = errors.InvalidArgument.customizeDescription(msg);
+                return { error };
+            }
+
+            actionParams.newerNoncurrentVersions = newerVersionsInt;
+
+        } else {
+            actionParams.newerNoncurrentVersions = 0;
+        }
+
+        return actionParams;
     }
 
     /**

@@ -1112,6 +1173,10 @@ export default class LifecycleConfiguration {
                     assert.strictEqual(typeof t.storageClass, 'string');
                 });
             }
+
+            if (a.newerNoncurrentVersions) {
+                assert.strictEqual(typeof a.newerNoncurrentVersions, 'number');
+            }
         });
     });
 }

@@ -1161,15 +1226,24 @@ export default class LifecycleConfiguration {
         }
 
         const Actions = actions.map(action => {
-            const { actionName, days, date, deleteMarker,
-                nonCurrentVersionTransition, transition } = action;
+            const {
+                actionName,
+                days,
+                date,
+                deleteMarker,
+                nonCurrentVersionTransition,
+                transition,
+                newerNoncurrentVersions,
+            } = action;
             let Action: any;
             if (actionName === 'AbortIncompleteMultipartUpload') {
                 Action = `<${actionName}><DaysAfterInitiation>${days}` +
                     `</DaysAfterInitiation></${actionName}>`;
             } else if (actionName === 'NoncurrentVersionExpiration') {
-                Action = `<${actionName}><NoncurrentDays>${days}` +
-                    `</NoncurrentDays></${actionName}>`;
+                const Days = `<NoncurrentDays>${days}</NoncurrentDays>`;
+                const NewerVersions = newerNoncurrentVersions ?
+                    `<NewerNoncurrentVersions>${newerNoncurrentVersions}</NewerNoncurrentVersions>` : '';
+                Action = `<${actionName}>${Days}${NewerVersions}</${actionName}>`;
             } else if (actionName === 'Expiration') {
                 const Days = days ? `<Days>${days}</Days>` : '';
                 const Date = date ? `<Date>${date}</Date>` : '';

@@ -1246,13 +1320,18 @@ export default class LifecycleConfiguration {
         }
 
         actions.forEach(action => {
-            const { actionName, days, date, deleteMarker } = action;
+            const { actionName, days, date, deleteMarker, newerNoncurrentVersions } = action;
             if (actionName === 'AbortIncompleteMultipartUpload') {
                 entry.addAbortMPU(days!);
                 return;
             }
             if (actionName === 'NoncurrentVersionExpiration') {
-                entry.addNCVExpiration(days!);
+                entry.addNCVExpiration('NoncurrentDays', days!);
+
+                if (newerNoncurrentVersions) {
+                    entry.addNCVExpiration('NewerNoncurrentVersions', newerNoncurrentVersions!);
+                }
+
                 return;
             }
             if (actionName === 'Expiration') {

@@ -1289,6 +1368,7 @@ export type Rule = {
     days?: number;
     date?: number;
     deleteMarker?: boolean;
+    newerNoncurrentVersions?: number;
     nonCurrentVersionTransition?: {
         noncurrentDays: number;
         storageClass: string;
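
In practice the parsed NoncurrentVersionExpiration action now always carries both fields; a sketch (the `lifecycleConfig` and `rule` names are placeholders):

    const parsed = lifecycleConfig._parseNoncurrentVersionExpiration(rule);
    // => { days: 30, newerNoncurrentVersions: 5 }  when <NewerNoncurrentVersions>5</NewerNoncurrentVersions> is present
    // => { days: 30, newerNoncurrentVersions: 0 }  when the tag is absent
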
@@ -10,6 +10,10 @@ export type Expiration = {
     Date?: number | boolean;
     Days?: number | boolean;
 };
+export type NoncurrentExpiration = {
+    NoncurrentDays: number | null;
+    NewerNoncurrentVersions: number | null;
+};
 
 /**
  * @class LifecycleRule

@@ -21,7 +25,7 @@ export default class LifecycleRule {
     status: Status;
     tags: Tags;
     expiration?: Expiration;
-    ncvExpiration?: { NoncurrentDays: number };
+    ncvExpiration?: NoncurrentExpiration;
     abortMPU?: { DaysAfterInitiation: number };
     transitions?: any[];
     ncvTransitions?: any[];

@@ -39,7 +43,7 @@ export default class LifecycleRule {
         ID: string;
         Status: Status;
         Expiration?: Expiration;
-        NoncurrentVersionExpiration?: { NoncurrentDays: number };
+        NoncurrentVersionExpiration?: NoncurrentExpiration;
         AbortIncompleteMultipartUpload?: { DaysAfterInitiation: number };
         Transitions?: any[];
         NoncurrentVersionTransitions?: any[];

@@ -51,7 +55,7 @@ export default class LifecycleRule {
             rule.Expiration = this.expiration;
         }
         if (this.ncvExpiration) {
-            rule.NoncurrentVersionExpiration = this.ncvExpiration;
+            rule.NoncurrentVersionExpiration = this.ncvExpiration
         }
         if (this.abortMPU) {
             rule.AbortIncompleteMultipartUpload = this.abortMPU;

@@ -141,15 +145,24 @@ export default class LifecycleRule {
 
     /**
      * NoncurrentVersionExpiration
-     * @param days - NoncurrentDays
+     * @param prop - Property must be defined in `validProps`
+     * @param value - integer for `NoncurrentDays` and `NewerNoncurrentVersions`
      */
-    addNCVExpiration(days: number) {
-        this.ncvExpiration = { NoncurrentDays: days };
+    addNCVExpiration(prop: 'NoncurrentDays' | 'NewerNoncurrentVersions', value: number): this;
+    addNCVExpiration(prop: string, value: number) {
+        const validProps = ['NoncurrentDays', 'NewerNoncurrentVersions'];
+        if (validProps.includes(prop)) {
+            this.ncvExpiration = this.ncvExpiration || {
+                NoncurrentDays: null,
+                NewerNoncurrentVersions: null,
+            };
+            this.ncvExpiration[prop] = value;
+        }
         return this;
     }
 
     /**
-     * AbortIncompleteMultipartUpload
+     * abortincompletemultipartupload
      * @param days - DaysAfterInitiation
      */
     addAbortMPU(days: number) {
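
A minimal sketch of the new two-argument builder call (the rule id and status arguments are illustrative):

    const rule = new LifecycleRule('purge-noncurrent', 'Enabled')
        .addNCVExpiration('NoncurrentDays', 30)
        .addNCVExpiration('NewerNoncurrentVersions', 5)
        .build();
    // rule.NoncurrentVersionExpiration => { NoncurrentDays: 30, NewerNoncurrentVersions: 5 }
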
@@ -58,13 +58,15 @@ export type ObjectMDData = {
     'x-amz-server-side-encryption-customer-algorithm': string;
     'x-amz-website-redirect-location': string;
     'x-amz-scal-transition-in-progress'?: boolean;
+    'x-amz-scal-transition-time'?: string;
     azureInfo?: any;
     acl: ACL;
     key: string;
     location: null | Location[];
-    // versionId, isNull, nullVersionId and isDeleteMarker
+    // versionId, isNull, isNull2, nullVersionId and isDeleteMarker
     // should be undefined when not set explicitly
     isNull?: boolean;
+    isNull2?: boolean;
     nullVersionId?: string;
     nullUploadId?: string;
     isDeleteMarker?: boolean;

@@ -209,6 +211,7 @@ export default class ObjectMD {
             // versionId, isNull, nullVersionId and isDeleteMarker
             // should be undefined when not set explicitly
             isNull: undefined,
+            isNull2: undefined,
             nullVersionId: undefined,
             nullUploadId: undefined,
             isDeleteMarker: undefined,

@@ -647,10 +650,24 @@ export default class ObjectMD {
      * Set metadata transition in progress value
      *
      * @param inProgress - True if transition is in progress, false otherwise
+     * @param transitionTime - Date when the transition started
      * @return itself
      */
-    setTransitionInProgress(inProgress: boolean) {
+    setTransitionInProgress(inProgress: false): this
+    setTransitionInProgress(inProgress: true, transitionTime: Date|string|number): this
+    setTransitionInProgress(inProgress: boolean, transitionTime?: Date|string|number) {
         this._data['x-amz-scal-transition-in-progress'] = inProgress;
+        if (!inProgress || !transitionTime) {
+            delete this._data['x-amz-scal-transition-time'];
+        } else {
+            if (typeof transitionTime === 'number') {
+                transitionTime = new Date(transitionTime);
+            }
+            if (transitionTime instanceof Date) {
+                transitionTime = transitionTime.toISOString();
+            }
+            this._data['x-amz-scal-transition-time'] = transitionTime;
+        }
         return this;
     }

@@ -663,6 +680,14 @@ export default class ObjectMD {
         return this._data['x-amz-scal-transition-in-progress'];
     }
 
+    /**
+     * Gets the transition time of the object.
+     * @returns The transition time of the object.
+     */
+    getTransitionTime() {
+        return this._data['x-amz-scal-transition-time'];
+    }
+
     /**
      * Set access control list
     *

@@ -811,6 +836,31 @@ export default class ObjectMD {
         return this._data.isNull || false;
     }
 
+    /**
+     * Set metadata isNull2 value
+     *
+     * @param isNull2 - Whether new version is null or not AND has
+     * been put with a Cloudserver handling null keys (i.e. supporting
+     * S3C-7352)
+     *
+     * @return itself
+     */
+    setIsNull2(isNull2: boolean) {
+        this._data.isNull2 = isNull2;
+        return this;
+    }
+
+    /**
+     * Get metadata isNull2 value
+     *
+     * @return isNull2 - Whether new version is null or not AND has
+     * been put with a Cloudserver handling null keys (i.e. supporting
+     * S3C-7352)
+     */
+    getIsNull2() {
+        return this._data.isNull2 || false;
+    }
+
     /**
     * Set metadata nullVersionId value
     *
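
Sketch of the overloaded setter in use, assuming `md` is an ObjectMD instance (epoch milliseconds and Date values are both normalized to an ISO string):

    md.setTransitionInProgress(true, 1700000000000); // stored as '2023-11-14T22:13:20.000Z'
    md.getTransitionTime();                          // => '2023-11-14T22:13:20.000Z'
    md.setTransitionInProgress(false);               // clears x-amz-scal-transition-time
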
@@ -1,6 +1,8 @@
 import assert from 'assert';
 import UUID from 'uuid';
 
+import { RequestLogger } from 'werelogs';
+
 import escapeForXml from '../s3middleware/escapeForXml';
 import errors from '../errors';
 import { isValidBucketName } from '../s3routes/routesUtils';

@@ -435,7 +435,6 @@ export default class Server {
         this._server.on('connection', sock => {
             // Setting no delay of the socket to the value configured
             // TODO fix this
-            // @ts-expect-errors
             sock.setNoDelay(this.isNoDelay());
             sock.on('error', err => this._logger.info(
                 'socket error - request rejected', { error: err }));

@@ -62,7 +62,7 @@ export default class HealthProbeServer extends httpServer {
     _onLiveness(
         _req: http.IncomingMessage,
         res: http.ServerResponse,
-        log: RequestLogger,
+        log: werelogs.RequestLogger,
     ) {
         if (this._livenessCheck(log)) {
             sendSuccess(res, log);

@@ -74,7 +74,7 @@ export default class HealthProbeServer extends httpServer {
     _onReadiness(
         _req: http.IncomingMessage,
         res: http.ServerResponse,
-        log: RequestLogger,
+        log: werelogs.RequestLogger,
     ) {
         if (this._readinessCheck(log)) {
             sendSuccess(res, log);

@@ -84,10 +84,11 @@ export default class HealthProbeServer extends httpServer {
     }
 
     // expose metrics to Prometheus
-    _onMetrics(_req: http.IncomingMessage, res: http.ServerResponse) {
+    async _onMetrics(_req: http.IncomingMessage, res: http.ServerResponse) {
+        const metrics = await ZenkoMetrics.asPrometheus();
         res.writeHead(200, {
             'Content-Type': ZenkoMetrics.asPrometheusContentType(),
         });
-        res.end(ZenkoMetrics.asPrometheus());
+        res.end(metrics);
     }
 }

@@ -16,7 +16,7 @@ export const DEFAULT_METRICS_ROUTE = '/metrics';
  * @param log - Werelogs instance for logging if you choose to
  */
 
-export type ProbeDelegate = (res: http.ServerResponse, log: RequestLogger) => string | void
+export type ProbeDelegate = (res: http.ServerResponse, log: werelogs.RequestLogger) => string | void
 
 export type ProbeServerParams = {
     port: number;

@@ -1,4 +1,7 @@
 import * as http from 'http';
+
+import { RequestLogger } from 'werelogs';
+
 import { ArsenalError } from '../../errors';
 
 /**

@@ -119,7 +119,7 @@ export default class RESTClient {
         method: string,
         headers: http.OutgoingHttpHeaders | null,
         key: string | null,
-        log: RequestLogger,
+        log: werelogs.RequestLogger,
         responseCb: (res: http.IncomingMessage) => void,
     ) {
         const reqHeaders = headers || {};

@@ -25,7 +25,7 @@ function setContentRange(
 
 function sendError(
     res: http.ServerResponse,
-    log: RequestLogger,
+    log: werelogs.RequestLogger,
     error: ArsenalError,
     optMessage?: string,
 ) {

@@ -141,7 +141,7 @@ export default class RESTServer extends httpServer {
     _onPut(
         req: http.IncomingMessage,
         res: http.ServerResponse,
-        log: RequestLogger,
+        log: werelogs.RequestLogger,
     ) {
         let size: number;
         try {

@@ -183,7 +183,7 @@ export default class RESTServer extends httpServer {
     _onGet(
         req: http.IncomingMessage,
         res: http.ServerResponse,
-        log: RequestLogger,
+        log: werelogs.RequestLogger,
     ) {
         let pathInfo: ReturnType<typeof parseURL>;
         let rangeSpec: ReturnType<typeof httpUtils.parseRangeSpec> | undefined =

@@ -266,7 +266,7 @@ export default class RESTServer extends httpServer {
     _onDelete(
         req: http.IncomingMessage,
         res: http.ServerResponse,
-        log: RequestLogger,
+        log: werelogs.RequestLogger,
     ) {
         let pathInfo: ReturnType<typeof parseURL>;
         try {

@@ -1,6 +1,6 @@
 import ioClient from 'socket.io-client';
 import * as http from 'http';
-import io from 'socket.io';
+import { Server as IOServer } from 'socket.io';
 import * as sioStream from './sio-stream';
 import async from 'async';
 import assert from 'assert';

@@ -497,7 +497,7 @@ export function RPCServer(params: {
     assert(params.logger);
 
     const httpServer = http.createServer();
-    const server = io(httpServer);
+    const server = new IOServer(httpServer, { maxHttpBufferSize: 1e8 });
     const log = params.logger;
 
     /**

@@ -508,7 +508,7 @@ export function RPCServer(params: {
     *
     * @param {BaseService} serviceList - list of services to register
     */
-    server.registerServices = function registerServices(...serviceList: any[]) {
+    (server as any).registerServices = function registerServices(...serviceList: any[]) {
         serviceList.forEach(service => {
             const sock = this.of(service.namespace);
             sock.on('connection', conn => {

@@ -536,7 +536,7 @@ export function RPCServer(params: {
         });
     };
 
-    server.listen = function listen(port, bindAddress = undefined) {
+    (server as any).listen = function listen(port, bindAddress = undefined) {
         httpServer.listen(port, bindAddress);
     };

@@ -38,7 +38,7 @@
     },
     "principalAWSUserArn": {
         "type": "string",
-        "pattern": "^arn:aws:iam::[0-9]{12}:user/(?!\\*)[\\w+=,.@ -/]{1,64}$"
+        "pattern": "^arn:aws:iam::[0-9]{12}:user/(?!\\*)[\\w+=,.@ -/]{1,2017}$"
     },
     "principalAWSRoleArn": {
         "type": "string",

@@ -360,6 +360,9 @@
         "type": "string",
         "const": "2012-10-17"
     },
+    "Id": {
+        "type": "string"
+    },
     "Statement": {
         "oneOf": [
             {

@@ -28,7 +28,7 @@
     },
     "principalAWSUserArn": {
         "type": "string",
-        "pattern": "^arn:aws:iam::[0-9]{12}:user/(?!\\*)[\\w+=,.@ -/]{1,64}$"
+        "pattern": "^arn:aws:iam::[0-9]{12}:user/(?!\\*)[\\w+=,.@ -/]{1,2017}$"
     },
     "principalAWSRoleArn": {
         "type": "string",

@@ -12,13 +12,39 @@ import {
     actionMapSSO,
     actionMapSTS,
     actionMapMetadata,
+    actionMapScuba,
 } from './utils/actionMaps';
 
-const _actionNeedQuotaCheck = {
+export const actionNeedQuotaCheck = {
     objectPut: true,
+    objectPutVersion: true,
     objectPutPart: true,
+    objectRestore: true,
 };
 
+/**
+ * This variable describes APIs that change the bytes
+ * stored, requiring quota updates
+ */
+export const actionWithDataDeletion = {
+    objectDelete: true,
+    objectDeleteVersion: true,
+    multipartDelete: true,
+    multiObjectDelete: true,
+};
+
+/**
+ * The function returns true if the current API call is a copy object
+ * and the action requires a quota evaluation logic, post retrieval
+ * of the object metadata.
+ * @param {string} action - the action being performed
+ * @param {string} currentApi - the current API being called
+ * @return {boolean} - whether the action requires a quota check
+ */
+export function actionNeedQuotaCheckCopy(action: string, currentApi: string) {
+    return action === 'objectGet' && (currentApi === 'objectCopy' || currentApi === 'objectPutCopyPart');
+}
+
 function _findAction(service: string, method: string) {
     switch (service) {
         case 's3':

@@ -36,6 +62,8 @@ function _findAction(service: string, method: string) {
             return actionMapSTS[method];
         case 'metadata':
             return actionMapMetadata[method];
+        case 'scuba':
+            return actionMapScuba[method];
         default:
             return undefined;
     }

@@ -105,6 +133,10 @@ function _buildArn(
             return `arn:scality:metadata::${requesterInfo!.accountid}:` +
                 `${generalResource}/`;
         }
+        case 'scuba': {
+            return `arn:scality:scuba::${requesterInfo!.accountid}:` +
+                `${generalResource}${specificResource ? '/' + specificResource : ''}`;
+        }
         default:
             return undefined;
     }

@@ -173,6 +205,7 @@ export default class RequestContext {
     _needTagEval: boolean;
     _foundAction?: string;
     _foundResource?: string;
+    _objectLockRetentionDays?: number | null;
 
     constructor(
         headers: { [key: string]: string | string[] },

@@ -194,6 +227,7 @@ export default class RequestContext {
         requestObjTags?: string,
         existingObjTag?: string,
         needTagEval?: false,
+        objectLockRetentionDays?: number,
     ) {
         this._headers = headers;
         this._query = query;

@@ -222,10 +256,12 @@ export default class RequestContext {
         this._securityToken = securityToken;
         this._policyArn = policyArn;
         this._action = action;
-        this._needQuota = _actionNeedQuotaCheck[apiMethod] === true;
+        this._needQuota = actionNeedQuotaCheck[apiMethod] === true
+            || actionWithDataDeletion[apiMethod] === true;
         this._requestObjTags = requestObjTags || null;
         this._existingObjTag = existingObjTag || null;
         this._needTagEval = needTagEval || false;
+        this._objectLockRetentionDays = objectLockRetentionDays || null;
         return this;
     }

@@ -257,6 +293,7 @@ export default class RequestContext {
             requestObjTags: this._requestObjTags,
             existingObjTag: this._existingObjTag,
             needTagEval: this._needTagEval,
+            objectLockRetentionDays: this._objectLockRetentionDays,
         };
         return JSON.stringify(requestInfo);
     }

@@ -297,6 +334,7 @@ export default class RequestContext {
             obj.requestObjTags,
             obj.existingObjTag,
             obj.needTagEval,
+            obj.objectLockRetentionDays,
         );
     }

@@ -700,4 +738,24 @@ export default class RequestContext {
     getNeedTagEval() {
         return this._needTagEval;
     }
+
+    /**
+     * Get object lock retention days
+     *
+     * @returns objectLockRetentionDays - object lock retention days
+     */
+    getObjectLockRetentionDays() {
+        return this._objectLockRetentionDays;
+    }
+
+    /**
+     * Set object lock retention days
+     *
+     * @param objectLockRetentionDays - object lock retention days
+     * @returns itself
+     */
+    setObjectLockRetentionDays(objectLockRetentionDays: number) {
+        this._objectLockRetentionDays = objectLockRetentionDays;
+        return this;
+    }
 }
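
Sketch of how the exported quota helpers combine (mirroring the constructor logic above):

    actionNeedQuotaCheck.objectRestore;                  // true: restores consume storage
    actionWithDataDeletion.multiObjectDelete;            // true: deletions must update quotas too
    actionNeedQuotaCheckCopy('objectGet', 'objectCopy'); // true: the copy source read is quota-relevant
    actionNeedQuotaCheckCopy('objectGet', 'objectPut');  // false
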
@@ -310,6 +310,7 @@ export function evaluatePolicy(
 }
 
 /**
+ * @deprecated Upgrade to standardEvaluateAllPolicies
  * Evaluate whether a request is permitted under a policy.
  * @param requestContext - Info necessary to
 * evaluate permission

@@ -325,6 +326,16 @@ export function evaluateAllPolicies(
     allPolicies: any[],
     log: Logger,
 ): string {
+    return standardEvaluateAllPolicies(requestContext, allPolicies, log).verdict;
+}
+export function standardEvaluateAllPolicies(
+    requestContext: RequestContext,
+    allPolicies: any[],
+    log: Logger,
+): {
+    verdict: string;
+    isImplicit: boolean;
+} {
     log.trace('evaluating all policies');
     let allow = false;
     let allowWithTagCondition = false;

@@ -333,7 +344,10 @@ export function evaluateAllPolicies(
         const singlePolicyVerdict = evaluatePolicy(requestContext, allPolicies[i], log);
         // If there is any Deny, just return Deny
         if (singlePolicyVerdict === 'Deny') {
-            return 'Deny';
+            return {
+                verdict: 'Deny',
+                isImplicit: false,
+            };
         }
         if (singlePolicyVerdict === 'Allow') {
             allow = true;

@@ -344,6 +358,7 @@ export function evaluateAllPolicies(
         } // else 'Neutral'
     }
     let verdict;
+    let isImplicit = false;
     if (allow) {
         if (denyWithTagCondition) {
             verdict = 'NeedTagConditionEval';

@@ -355,8 +370,9 @@ export function evaluateAllPolicies(
             verdict = 'NeedTagConditionEval';
         } else {
             verdict = 'Deny';
+            isImplicit = true;
         }
     }
-    log.trace('result of evaluating all policies', { verdict });
-    return verdict;
+    log.trace('result of evaluating all policies', { verdict, isImplicit });
+    return { verdict, isImplicit };
 }
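
Callers that need to distinguish an explicit Deny from "no policy matched" can use the richer variant; legacy callers keep the old string behavior. A sketch:

    const { verdict, isImplicit } = standardEvaluateAllPolicies(requestContext, policies, log);
    if (verdict === 'Deny' && isImplicit) {
        // no Allow and no explicit Deny statement matched
    }
    evaluateAllPolicies(requestContext, policies, log); // still returns just the verdict string
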
@@ -52,6 +52,12 @@ const sharedActionMap = {
     objectPutVersion: 's3:PutObjectVersion',
 };
 
+const actionMapBucketQuotas = {
+    bucketGetQuota: 'scality:GetBucketQuota',
+    bucketUpdateQuota: 'scality:UpdateBucketQuota',
+    bucketDeleteQuota: 'scality:DeleteBucketQuota',
+};
+
 // action map used for request context
 const actionMapRQ = {
     bucketPut: 's3:CreateBucket',

@@ -65,6 +71,7 @@ const actionMapRQ = {
     initiateMultipartUpload: 's3:PutObject',
     objectDeleteVersion: 's3:DeleteObjectVersion',
     objectDeleteTaggingVersion: 's3:DeleteObjectVersionTagging',
+    objectGetArchiveInfo: 'scality:GetObjectArchiveInfo',
     objectGetVersion: 's3:GetObjectVersion',
     objectGetACLVersion: 's3:GetObjectVersionAcl',
     objectGetTaggingVersion: 's3:GetObjectVersionTagging',

@@ -79,10 +86,11 @@ const actionMapRQ = {
     objectPutLegalHoldVersion: 's3:PutObjectLegalHold',
     listObjectVersions: 's3:ListBucketVersions',
     ...sharedActionMap,
+    ...actionMapBucketQuotas,
 };
 
 // action map used for bucket policies
-const actionMapBP = { ...sharedActionMap };
+const actionMapBP = actionMapRQ;
 
 // action map for all relevant s3 actions
 const actionMapS3 = {

@@ -151,6 +159,15 @@ const actionMonitoringMapS3 = {
     objectPutTagging: 'PutObjectTagging',
     objectRestore: 'RestoreObject',
     serviceGet: 'ListBuckets',
+    bucketGetQuota: 'GetBucketQuota',
+    bucketUpdateQuota: 'UpdateBucketQuota',
+    bucketDeleteQuota: 'DeleteBucketQuota',
+};
+
+const actionMapAccountQuotas = {
+    UpdateAccountQuota : 'scality:UpdateAccountQuota',
+    DeleteAccountQuota : 'scality:DeleteAccountQuota',
+    GetAccountQuota : 'scality:GetAccountQuota',
 };
 
 const actionMapIAM = {

@@ -194,6 +211,7 @@ const actionMapIAM = {
     tagUser: 'iam:TagUser',
     unTagUser: 'iam:UntagUser',
     listUserTags: 'iam:ListUserTags',
+    ...actionMapAccountQuotas,
 };
 
 const actionMapSSO = {

@@ -209,6 +227,14 @@ const actionMapMetadata = {
     default: 'metadata:bucketd',
 };
 
+const actionMapScuba = {
+    GetMetrics: 'scuba:GetMetrics',
+    AdminStartIngest: 'scuba:AdminStartIngest',
+    AdminStopIngest: 'scuba:AdminStopIngest',
+    AdminReadRaftCseq: 'scuba:AdminReadRaftCseq',
+    AdminTriggerRepair: 'scuba:AdminTriggerRepair',
+};
+
 export {
     actionMapRQ,
     actionMapBP,

@@ -218,4 +244,5 @@ export {
     actionMapSSO,
     actionMapSTS,
     actionMapMetadata,
+    actionMapScuba,
 };

@@ -1,5 +1,5 @@
 import { handleWildcardInResource } from './wildcards';
+import { policyArnAllowedEmptyAccountId } from '../../constants';
 /**
  * Checks whether an ARN from a request matches an ARN in a policy
  * to compare against each portion of the ARN from the request

@@ -38,9 +38,10 @@ export default function checkArnMatch(
         const requestSegment = caseSensitive ? requestArnArr[j] :
             requestArnArr[j].toLowerCase();
         const policyArnArr = policyArn.split(':');
-        // We want to allow an empty account ID for utapi service ARNs to not
+        // We want to allow an empty account ID for utapi and scuba service ARNs to not
         // break compatibility.
-        if (j === 4 && policyArnArr[2] === 'utapi' && policyArnArr[4] === '') {
+        if (j === 4 && policyArnAllowedEmptyAccountId.includes(policyArnArr[2])
+            && policyArnArr[4] === '') {
             continue;
         } else if (!segmentRegEx.test(requestSegment)) {
             return false;

@@ -168,6 +168,9 @@ export function findConditionKey(
             return requestContext.getNeedTagEval() && requestContext.getRequestObjTags()
                 ? getTagKeys(requestContext.getRequestObjTags()!)
                 : undefined;
+        // The maximum retention period is 100 years.
+        case 's3:object-lock-remaining-retention-days':
+            return requestContext.getObjectLockRetentionDays() || undefined;
         default:
             return undefined;
     }

@@ -2,6 +2,9 @@ import assert from 'assert';
 import * as crypto from 'crypto';
 import * as stream from 'stream';
 import azure from '@azure/storage-blob';
+
+import { RequestLogger } from 'werelogs';
+
 import ResultsCollector from './ResultsCollector';
 import SubStreamInterface from './SubStreamInterface';
 import * as objectUtils from '../objectUtils';

@@ -1,19 +1,25 @@
-const oneDay = 24 * 60 * 60 * 1000; // Milliseconds in a day.
+import { scaleMsPerDay } from '../objectUtils';
+
+const msInOneDay = 24 * 60 * 60 * 1000; // Milliseconds in a day.
 
 export default class LifecycleDateTime {
     _transitionOneDayEarlier?: boolean;
     _expireOneDayEarlier?: boolean;
+    _timeProgressionFactor?: number;
+    _scaledMsPerDay: number;
 
     constructor(params?: {
         transitionOneDayEarlier: boolean;
         expireOneDayEarlier: boolean;
+        timeProgressionFactor: number;
     }) {
         this._transitionOneDayEarlier = params?.transitionOneDayEarlier;
         this._expireOneDayEarlier = params?.expireOneDayEarlier;
+        this._timeProgressionFactor = params?.timeProgressionFactor || 1;
+        this._scaledMsPerDay = scaleMsPerDay(this._timeProgressionFactor);
     }
 
     getCurrentDate() {
-        const timeTravel = this._expireOneDayEarlier ? oneDay : 0;
+        const timeTravel = this._expireOneDayEarlier ? msInOneDay : 0;
         return Date.now() + timeTravel;
     }

@@ -25,7 +31,7 @@ export default class LifecycleDateTime {
     findDaysSince(date: Date) {
         const now = this.getCurrentDate();
         const diff = now - date.getTime();
-        return Math.floor(diff / (1000 * 60 * 60 * 24));
+        return Math.floor(diff / this._scaledMsPerDay);
     }
 
     /**

@@ -52,8 +58,8 @@ export default class LifecycleDateTime {
         }
         if (transition.Days !== undefined) {
             const lastModifiedTime = this.getTimestamp(lastModified);
-            const timeTravel = this._transitionOneDayEarlier ? -oneDay : 0;
-            return lastModifiedTime + (transition.Days * oneDay) + timeTravel;
+            const timeTravel = this._transitionOneDayEarlier ? -msInOneDay : 0;
+            return lastModifiedTime + (transition.Days * this._scaledMsPerDay) + timeTravel;
         }
     }

@@ -69,8 +75,8 @@ export default class LifecycleDateTime {
     ) {
         if (transition.NoncurrentDays !== undefined) {
             const lastModifiedTime = this.getTimestamp(lastModified);
-            const timeTravel = this._transitionOneDayEarlier ? -oneDay : 0;
-            return lastModifiedTime + (transition.NoncurrentDays * oneDay) + timeTravel;
+            const timeTravel = this._transitionOneDayEarlier ? -msInOneDay : 0;
+            return lastModifiedTime + (transition.NoncurrentDays * this._scaledMsPerDay) + timeTravel;
         }
     }
 }
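
A sketch of the intended use in tests: with a progression factor of 24, one real hour counts as one lifecycle "day" (scaleMsPerDay(24) === 3600000):

    const dateTime = new LifecycleDateTime({
        transitionOneDayEarlier: false,
        expireOneDayEarlier: false,
        timeProgressionFactor: 24,
    });
    // An object modified 2 real hours ago is reported as 2 "days" old.
    dateTime.findDaysSince(new Date(Date.now() - 2 * 60 * 60 * 1000)); // => 2
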
@@ -284,6 +284,7 @@ export default class LifecycleUtils {
         // Names are long, so obscuring a bit
         const ncve = 'NoncurrentVersionExpiration';
         const ncd = 'NoncurrentDays';
+        const nncv = 'NewerNoncurrentVersions';
 
         if (!store[ncve]) {
             store[ncve] = {};

@@ -291,6 +292,7 @@ export default class LifecycleUtils {
         if (!store[ncve][ncd] || rule[ncve][ncd] < store[ncve][ncd]) {
             store[ncve].ID = rule.ID;
             store[ncve][ncd] = rule[ncve][ncd];
+            store[ncve][nncv] = rule[ncve][nncv];
         }
     }
     if (rule.AbortIncompleteMultipartUpload

@@ -1,3 +1,5 @@
+const msInOneDay = 24 * 60 * 60 * 1000; // Milliseconds in a day.
+
 export const getMD5Buffer = (base64MD5: WithImplicitCoercion<string> | Uint8Array) =>
     base64MD5 instanceof Uint8Array ? base64MD5 : Buffer.from(base64MD5, 'base64')

@@ -6,3 +8,14 @@ export const getHexMD5 = (base64MD5: WithImplicitCoercion<string> | Uint8Array)
 
 export const getBase64MD5 = (hexMD5: WithImplicitCoercion<string>) =>
     Buffer.from(hexMD5, 'hex').toString('base64');
+
+/**
+ * Calculates the number of scaled milliseconds per day based on the given time progression factor.
+ * This function is intended for testing and simulation purposes only.
+ * @param {number} timeProgressionFactor - The desired time progression factor for scaling.
+ * @returns {number} The number of scaled milliseconds per day.
+ * If the result is 0, the minimum value of 1 millisecond is returned.
+ */
+export const scaleMsPerDay = (timeProgressionFactor: number): number =>
+    Math.round(msInOneDay / (timeProgressionFactor || 1)) || 1;

@@ -1,4 +1,7 @@
 import assert from 'assert';
+
+import { RequestLogger } from 'werelogs';
+
 import errors from '../errors';
 import routeGET from './routes/routeGET';
 import routePUT from './routes/routePUT';

@@ -1,3 +1,5 @@
+import { RequestLogger } from 'werelogs';
+
 import * as routesUtils from '../routesUtils';
 import errors from '../../errors';
 import StatsClient from '../../metrics/StatsClient';

@@ -41,6 +43,8 @@ export default function routeDELETE(
             return call('bucketDeleteEncryption');
         } else if (query?.tagging !== undefined) {
             return call('bucketDeleteTagging');
+        } else if (query?.quota !== undefined) {
+            return call('bucketDeleteQuota');
         }
         call('bucketDelete');
     } else {

@@ -1,3 +1,5 @@
+import { RequestLogger } from 'werelogs';
+
 import * as routesUtils from '../routesUtils';
 import errors from '../../errors';
 import * as http from 'http';

@@ -58,6 +60,8 @@ export default function routerGET(
             call('bucketGetEncryption');
         } else if (query.search !== undefined) {
             call('metadataSearch')
+        } else if (query.quota !== undefined) {
+            call('bucketGetQuota');
         } else {
             // GET bucket
             call('bucketGet');

@@ -1,3 +1,5 @@
+import { RequestLogger } from 'werelogs';
+
 import * as routesUtils from '../routesUtils';
 import errors from '../../errors';
 import StatsClient from '../../metrics/StatsClient';

@ -1,3 +1,5 @@
+import { RequestLogger } from 'werelogs';
+
 import * as routesUtils from '../routesUtils';
 import errors from '../../errors';
 import * as http from 'http';
@ -1,3 +1,5 @@
+import { RequestLogger } from 'werelogs';
+
 import * as routesUtils from '../routesUtils';
 import errors from '../../errors';
 import * as http from 'http';
@ -1,3 +1,5 @@
+import { RequestLogger } from 'werelogs';
+
 import * as routesUtils from '../routesUtils';
 import errors from '../../errors';
 import * as http from 'http';

@ -103,6 +105,13 @@ export default function routePUT(
                     return routesUtils.responseNoBody(err, corsHeaders,
                         response, 200, log);
                 });
+        } else if (query.quota !== undefined) {
+            api.callApiMethod('bucketUpdateQuota', request, response,
+                log, (err, resHeaders) => {
+                    routesUtils.statsReport500(err, statsClient);
+                    return routesUtils.responseNoBody(err, resHeaders, response,
+                        200, log);
+                });
         } else {
             // PUT bucket
             return api.callApiMethod('bucketPut', request, response, log,
|
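Taken together with the GET and DELETE hunks above, the `?quota` bucket subresource now dispatches as follows (a summary sketch, not code from the diff):

```js
// Method -> API name mapping introduced by the three route hunks above:
//   GET    /<bucket>?quota   -> 'bucketGetQuota'
//   PUT    /<bucket>?quota   -> 'bucketUpdateQuota'
//   DELETE /<bucket>?quota   -> 'bucketDeleteQuota'
// Each branch is selected with `query.quota !== undefined`, mirroring the other
// bucket subresources (tagging, encryption, ...).
```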
@ -1,3 +1,5 @@
+import { RequestLogger } from 'werelogs';
+
 import * as routesUtils from '../routesUtils';
 import errors from '../../errors';
 import * as http from 'http';

@ -27,6 +29,11 @@ export default function routerWebsite(
             routesUtils.statsReport500(err, statsClient);
             // request being redirected
             if (redirectInfo) {
+                if (err && redirectInfo.withError) {
+                    return routesUtils.redirectRequestOnError(err,
+                        'GET', redirectInfo, dataGetInfo, dataRetrievalParams,
+                        response, resMetaHeaders, log)
+                }
                 // note that key might have been modified in websiteGet
                 // api to add index document
                 return routesUtils.redirectRequest(redirectInfo,

@ -57,6 +64,11 @@ export default function routerWebsite(
             (err, resMetaHeaders, redirectInfo, key) => {
                 routesUtils.statsReport500(err, statsClient);
                 if (redirectInfo) {
+                    if (err && redirectInfo.withError) {
+                        return routesUtils.redirectRequestOnError(err,
+                            'HEAD', redirectInfo, null, dataRetrievalParams,
+                            response, resMetaHeaders, log)
+                    }
                     return routesUtils.redirectRequest(redirectInfo,
                         // TODO ARSN-217 encrypted does not exists in request.connection
                         // @ts-ignore
|
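The helper these branches call, `routesUtils.redirectRequestOnError`, is added further down in this changeset; a hedged sketch of what the client ends up seeing when a routing rule with `withError` matches:

```js
// Behaviour sketch (derived from the implementation added to routesUtils below):
// - the Location header is always set from routingInfo.location;
// - HEAD: headers only (errorHeaderResponse);
// - GET with no error document: x-amz-error-code / x-amz-error-message headers
//   plus the standard HTML error body;
// - GET with a configured error document: the document is streamed back with
//   the error code overridden to 301 (streamUserErrorPage).
```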
@ -1,10 +1,13 @@
 import * as url from 'url';
+import * as http from 'http';
+import { eachSeries } from 'async';
+
+import { RequestLogger } from 'werelogs';
+
 import * as ipCheck from '../ipCheck';
 import errors, { ArsenalError } from '../errors';
 import * as constants from '../constants';
-import { eachSeries } from 'async';
 import DataWrapper from '../storage/data/DataWrapper';
-import * as http from 'http';
 import StatsClient from '../metrics/StatsClient';
 import { objectKeyByteLimit } from '../constants';
 const jsutil = require('../jsutil');
@ -691,6 +694,8 @@ export function streamUserErrorPage(
     log: RequestLogger,
 ) {
     setCommonResponseHeaders(corsHeaders, response, log);
+    response.setHeader('x-amz-error-code', err.message);
+    response.setHeader('x-amz-error-message', err.description);
     response.writeHead(err.code, { 'Content-type': 'text/html' });
     response.on('finish', () => {
         // TODO ARSN-216 Fix logger
@ -873,7 +878,7 @@ export function redirectRequest(
     }
     let redirectLocation = justPath ? `/${redirectKey}` :
         `${redirectProtocol}://${redirectHostName}/${redirectKey}`;
-    if (!redirectKey && redirectLocationHeader) {
+    if (!redirectKey && redirectLocationHeader && redirectLocation !== '/') {
         // remove hanging slash
         redirectLocation = redirectLocation.slice(0, -1);
     }
@ -890,6 +895,52 @@ export function redirectRequest(
     return undefined;
 }
+
+/**
+ * redirectRequestOnError - redirect with an error body
+ * @param err - arsenal error object
+ * @param method - HTTP method
+ * @param routingInfo - info for routing
+ * @param [routingInfo.withError] - flag to differentiate from routing rules
+ * @param [routingInfo.location] - location header
+ * @param dataLocations --
+ * - array of locations to get streams from backend
+ * @param retrieveDataParams - params to create instance of
+ * data retrieval function
+ * @param response - response object
+ * @param corsHeaders - CORS-related response headers
+ * @param log - Werelogs instance
+ */
+export function redirectRequestOnError(
+    err: ArsenalError,
+    method: 'HEAD' | 'GET',
+    routingInfo: {
+        withError: true;
+        location: string;
+    },
+    dataLocations: { size: string | number }[] | null,
+    retrieveDataParams: any,
+    response: http.ServerResponse,
+    corsHeaders: { [key: string]: string },
+    log: RequestLogger,
+) {
+    response.setHeader('Location', routingInfo.location);
+
+    if (!dataLocations && err.is.Found) {
+        if (method === 'HEAD') {
+            return errorHeaderResponse(err, response, corsHeaders, log);
+        }
+        response.setHeader('x-amz-error-code', err.message);
+        response.setHeader('x-amz-error-message', err.description);
+        return errorHtmlResponse(err, false, '', response, corsHeaders, log);
+    }
+
+    // This is reached only for website error document (GET only)
+    const overrideErrorCode = err.flatten();
+    overrideErrorCode.code = 301;
+    return streamUserErrorPage(ArsenalError.unflatten(overrideErrorCode)!,
+        dataLocations || [], retrieveDataParams, response, corsHeaders, log);
+}
+
 /**
  * Get bucket name and object name from the request
  * @param request - http request object
@ -2,6 +2,8 @@ const async = require('async');
|
||||||
const PassThrough = require('stream').PassThrough;
|
const PassThrough = require('stream').PassThrough;
|
||||||
const assert = require('assert');
|
const assert = require('assert');
|
||||||
|
|
||||||
|
const { Logger } = require('werelogs');
|
||||||
|
|
||||||
const errors = require('../../errors').default;
|
const errors = require('../../errors').default;
|
||||||
const MD5Sum = require('../../s3middleware/MD5Sum').default;
|
const MD5Sum = require('../../s3middleware/MD5Sum').default;
|
||||||
const NullStream = require('../../s3middleware/nullStream').default;
|
const NullStream = require('../../s3middleware/nullStream').default;
|
||||||
|
@ -27,6 +29,7 @@ class DataWrapper {
|
||||||
this.metadata = metadata;
|
this.metadata = metadata;
|
||||||
this.locStorageCheckFn = locStorageCheckFn;
|
this.locStorageCheckFn = locStorageCheckFn;
|
||||||
this.vault = vault;
|
this.vault = vault;
|
||||||
|
this.logger = new Logger('DataWrapper');
|
||||||
}
|
}
|
||||||
|
|
||||||
put(cipherBundle, value, valueSize, keyContext, backendInfo, log, cb) {
|
put(cipherBundle, value, valueSize, keyContext, backendInfo, log, cb) {
|
||||||
|
@ -127,7 +130,7 @@ class DataWrapper {
|
||||||
}
|
}
|
||||||
|
|
||||||
delete(objectGetInfo, log, cb) {
|
delete(objectGetInfo, log, cb) {
|
||||||
const callback = cb || log.end;
|
const callback = cb || (() => {});
|
||||||
const isMdModelVersion2 = typeof(objectGetInfo) === 'string';
|
const isMdModelVersion2 = typeof(objectGetInfo) === 'string';
|
||||||
const isRequiredStringKey =
|
const isRequiredStringKey =
|
||||||
constants.clientsRequireStringKey[this.implName];
|
constants.clientsRequireStringKey[this.implName];
|
||||||
|
@ -176,7 +179,9 @@ class DataWrapper {
|
||||||
newObjDataStoreName)) {
|
newObjDataStoreName)) {
|
||||||
return process.nextTick(cb);
|
return process.nextTick(cb);
|
||||||
}
|
}
|
||||||
log.trace('initiating batch delete', {
|
const delLog = this.logger.newRequestLoggerFromSerializedUids(
|
||||||
|
log.getSerializedUids());
|
||||||
|
delLog.trace('initiating batch delete', {
|
||||||
keys: locations,
|
keys: locations,
|
||||||
implName: this.implName,
|
implName: this.implName,
|
||||||
method: 'batchDelete',
|
method: 'batchDelete',
|
||||||
|
@ -202,21 +207,21 @@ class DataWrapper {
|
||||||
return false;
|
return false;
|
||||||
});
|
});
|
||||||
if (shouldBatchDelete && keys.length > 1) {
|
if (shouldBatchDelete && keys.length > 1) {
|
||||||
return this.client.batchDelete(backendName, { keys }, log, cb);
|
return this.client.batchDelete(backendName, { keys }, delLog, cb);
|
||||||
}
|
}
|
||||||
return async.eachLimit(locations, 5, (loc, next) => {
|
return async.eachLimit(locations, 5, (loc, next) => {
|
||||||
process.nextTick(() => this.delete(loc, log, next));
|
process.nextTick(() => this.delete(loc, delLog, next));
|
||||||
},
|
},
|
||||||
err => {
|
err => {
|
||||||
if (err) {
|
if (err) {
|
||||||
log.end().error('batch delete failed', { error: err });
|
delLog.end().error('batch delete failed', { error: err });
|
||||||
// deletion of non-existing objects result in 204
|
// deletion of non-existing objects result in 204
|
||||||
if (err.code === 404) {
|
if (err.code === 404) {
|
||||||
return cb();
|
return cb();
|
||||||
}
|
}
|
||||||
return cb(err);
|
return cb(err);
|
||||||
}
|
}
|
||||||
log.end().trace('batch delete successfully completed');
|
delLog.end().trace('batch delete successfully completed');
|
||||||
return cb();
|
return cb();
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
|
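The presumed motivation for the new `delLog` above (a sketch; the rationale is inferred from the change, not stated in it): the `RequestLogger` handed to `delete`/`batchDelete` may already have been ended by the caller (the old fallback was `cb || log.end`), so batch deletion derives a fresh logger that keeps the same request-UID chain:

```js
// Derive a new werelogs request logger carrying the same UIDs as `log`,
// then use it for the whole batch-delete lifecycle (method names as in the diff).
const delLog = this.logger.newRequestLoggerFromSerializedUids(
    log.getSerializedUids());
delLog.trace('initiating batch delete', { keys: locations, method: 'batchDelete' });
// ... issue the per-key deletes / client.batchDelete with delLog ...
delLog.end().trace('batch delete successfully completed');
```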
@ -1,10 +1,10 @@
 const { http, https } = require('httpagent');
 const url = require('url');
 const AWS = require('aws-sdk');
-const Sproxy = require('sproxydclient');
-const Hyperdrive = require('hdclient');
 const HttpsProxyAgent = require('https-proxy-agent');

+require("aws-sdk/lib/maintenance_mode_message").suppress = true;
+
 const constants = require('../../constants');
 const DataFileBackend = require('./file/DataFileInterface');
 const inMemory = require('./in_memory/datastore').backend;

@ -25,8 +25,13 @@ function parseLC(config, vault) {
         if (locationObj.type === 'file') {
             clients[location] = new DataFileBackend(config);
         }
+        if (locationObj.type === 'vitastor') {
+            const VitastorBackend = require('./vitastor/VitastorBackend');
+            clients[location] = new VitastorBackend(location, locationObj.details);
+        }
         if (locationObj.type === 'scality') {
             if (locationObj.details.connector.sproxyd) {
+                const Sproxy = require('sproxydclient');
                 clients[location] = new Sproxy({
                     bootstrap: locationObj.details.connector
                         .sproxyd.bootstrap,

@ -41,6 +46,7 @@ function parseLC(config, vault) {
                 });
                 clients[location].clientType = 'scality';
             } else if (locationObj.details.connector.hdclient) {
+                const Hyperdrive = require('hdclient');
                 clients[location] = new Hyperdrive.hdcontroller.HDProxydClient(
                     locationObj.details.connector.hdclient);
                 clients[location].clientType = 'scality';
|
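For illustration, a hypothetical location entry of the new `vitastor` type; the `details` field names come from the `VitastorBackend` constructor later in this changeset, and all values are made up:

```js
const locationObj = {
    type: 'vitastor',
    details: {
        pool_id: 3,                          // required: data pool for volumes
        metadata_image: 'zenko-s3-metadata', // or metadata_pool_id + metadata_inode_num
        size_buckets: [32768, 131072, 524288, 2097152, 8388608],
        size_bucket_mul: 2,
        pack_objects: false,
        // ...plus any options understood by the vitastor client itself
    },
};
// parseLC() then effectively does:
// clients[location] = new (require('./vitastor/VitastorBackend'))(location, locationObj.details);
```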
@ -5,6 +5,7 @@ const { parseTagFromQuery } = require('../../s3middleware/tagging');
 const { externalBackendHealthCheckInterval } = require('../../constants');
 const DataFileBackend = require('./file/DataFileInterface');
 const { createLogger, checkExternalBackend } = require('./external/utils');
+const jsutil = require('../../jsutil');

 class MultipleBackendGateway {
     constructor(clients, metadata, locStorageCheckFn) {

@ -199,11 +200,12 @@ class MultipleBackendGateway {
     uploadPart(request, streamingV4Params, stream, size, location, key,
         uploadId, partNumber, bucketName, log, cb) {
         const client = this.clients[location];
+        const cbOnce = jsutil.once(cb);

         if (client.uploadPart) {
             return this.locStorageCheckFn(location, size, log, err => {
                 if (err) {
-                    return cb(err);
+                    return cbOnce(err);
                 }
                 return client.uploadPart(request, streamingV4Params, stream,
                     size, key, uploadId, partNumber, bucketName, log,
@ -217,14 +219,14 @@ class MultipleBackendGateway {
|
||||||
'metric following object PUT failure',
|
'metric following object PUT failure',
|
||||||
{ error: error.message });
|
{ error: error.message });
|
||||||
}
|
}
|
||||||
return cb(err);
|
return cbOnce(err);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
return cb(null, partInfo);
|
return cbOnce(null, partInfo);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
return cb();
|
return cbOnce();
|
||||||
}
|
}
|
||||||
|
|
||||||
listParts(key, uploadId, location, bucketName, partNumberMarker, maxParts,
|
listParts(key, uploadId, location, bucketName, partNumberMarker, maxParts,
|
||||||
|
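The `cbOnce` wrappers introduced in the uploadPart hunks above guard against the callback firing twice, for example once from the data path and once more from a late stream error. A minimal sketch of the semantics (`jsutil.once` is the helper required at the top of the file):

```js
const jsutil = require('../../jsutil');

const cbOnce = jsutil.once((err, result) => {
    // runs at most once, whichever call below happens first
});
cbOnce(null, { dataStoreETag: 'abc' }); // delivered
cbOnce(new Error('stream error'));      // ignored: the callback already fired
```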
@ -279,11 +281,10 @@ class MultipleBackendGateway {
     }

     objectTagging(method, key, bucket, objectMD, log, cb) {
-        console.log('>> objectTagging', method, key, bucket, objectMD);
         // if legacy, objectMD will not contain dataStoreName, so just return
         const client = this.clients[objectMD.dataStoreName];
         if (client && client[`object${method}Tagging`]) {
-            return client[`object${method}Tagging`](key, bucket.getName(), objectMD, log,
+            return client[`object${method}Tagging`](key, bucket, objectMD, log,
                 cb);
         }
         return cb();
@ -8,6 +8,7 @@ const getMetaHeaders =
 const { prepareStream } = require('../../../s3middleware/prepareStream');
 const { createLogger, logHelper, removeQuotes, trimXMetaPrefix } =
     require('./utils');
+const jsutil = require('../../../jsutil');

 const missingVerIdInternalError = errors.InternalError.customizeDescription(
     'Invalid state. Please ensure versioning is enabled ' +
@ -69,7 +70,6 @@ class AwsClient {

     _createAwsKey(requestBucketName, requestObjectKey,
         bucketMatch) {
-        console.log('===', requestBucketName, requestObjectKey, bucketMatch);
         if (bucketMatch) {
             return requestObjectKey;
         }
@ -318,9 +318,11 @@ class AwsClient {
|
||||||
uploadPart(request, streamingV4Params, stream, size, key, uploadId,
|
uploadPart(request, streamingV4Params, stream, size, key, uploadId,
|
||||||
partNumber, bucketName, log, callback) {
|
partNumber, bucketName, log, callback) {
|
||||||
let hashedStream = stream;
|
let hashedStream = stream;
|
||||||
|
const cbOnce = jsutil.once(callback);
|
||||||
|
|
||||||
if (request) {
|
if (request) {
|
||||||
const partStream = prepareStream(request, streamingV4Params,
|
const partStream = prepareStream(request, streamingV4Params,
|
||||||
this._vault, log, callback);
|
this._vault, log, cbOnce);
|
||||||
hashedStream = new MD5Sum();
|
hashedStream = new MD5Sum();
|
||||||
partStream.pipe(hashedStream);
|
partStream.pipe(hashedStream);
|
||||||
}
|
}
|
||||||
|
@ -334,7 +336,7 @@ class AwsClient {
|
||||||
if (err) {
|
if (err) {
|
||||||
logHelper(log, 'error', 'err from data backend ' +
|
logHelper(log, 'error', 'err from data backend ' +
|
||||||
'on uploadPart', err, this._dataStoreName, this.clientType);
|
'on uploadPart', err, this._dataStoreName, this.clientType);
|
||||||
return callback(errors.ServiceUnavailable
|
return cbOnce(errors.ServiceUnavailable
|
||||||
.customizeDescription('Error returned from ' +
|
.customizeDescription('Error returned from ' +
|
||||||
`${this.type}: ${err.message}`),
|
`${this.type}: ${err.message}`),
|
||||||
);
|
);
|
||||||
|
@ -348,7 +350,7 @@ class AwsClient {
|
||||||
dataStoreName: this._dataStoreName,
|
dataStoreName: this._dataStoreName,
|
||||||
dataStoreETag: noQuotesETag,
|
dataStoreETag: noQuotesETag,
|
||||||
};
|
};
|
||||||
return callback(null, dataRetrievalInfo);
|
return cbOnce(null, dataRetrievalInfo);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -490,17 +492,14 @@ class AwsClient {
|
||||||
}
|
}
|
||||||
|
|
||||||
objectPutTagging(key, bucketName, objectMD, log, callback) {
|
objectPutTagging(key, bucketName, objectMD, log, callback) {
|
||||||
console.log('0 >>', this._client.config, this._bucketMatch, '--->', this._createAwsKey(bucketName, key, this._bucketMatch));
|
|
||||||
const awsBucket = this._awsBucketName;
|
const awsBucket = this._awsBucketName;
|
||||||
const awsKey = this._createAwsKey(bucketName, key, this._bucketMatch);
|
const awsKey = this._createAwsKey(bucketName, key, this._bucketMatch);
|
||||||
const dataStoreVersionId = objectMD.location[0].dataStoreVersionId;
|
const dataStoreVersionId = objectMD.location[0].dataStoreVersionId;
|
||||||
console.log('1 >>', JSON.stringify(objectMD), key, awsKey);
|
|
||||||
const tagParams = {
|
const tagParams = {
|
||||||
Bucket: awsBucket,
|
Bucket: awsBucket,
|
||||||
Key: awsKey,
|
Key: awsKey,
|
||||||
VersionId: dataStoreVersionId,
|
VersionId: dataStoreVersionId,
|
||||||
};
|
};
|
||||||
console.log('2 >>', tagParams);
|
|
||||||
const keyArray = Object.keys(objectMD.tags);
|
const keyArray = Object.keys(objectMD.tags);
|
||||||
tagParams.Tagging = {};
|
tagParams.Tagging = {};
|
||||||
tagParams.Tagging.TagSet = keyArray.map(key => {
|
tagParams.Tagging.TagSet = keyArray.map(key => {
|
||||||
|
@ -509,7 +508,6 @@ class AwsClient {
|
||||||
});
|
});
|
||||||
return this._client.putObjectTagging(tagParams, err => {
|
return this._client.putObjectTagging(tagParams, err => {
|
||||||
if (err) {
|
if (err) {
|
||||||
console.log('ERROR!! >>', err);
|
|
||||||
logHelper(log, 'error', 'error from data backend on ' +
|
logHelper(log, 'error', 'error from data backend on ' +
|
||||||
'putObjectTagging', err,
|
'putObjectTagging', err,
|
||||||
this._dataStoreName, this.clientType);
|
this._dataStoreName, this.clientType);
|
||||||
|
|
|
@ -0,0 +1,696 @@
|
||||||
|
// Zenko CloudServer Vitastor data storage backend adapter
|
||||||
|
// Copyright (c) Vitaliy Filippov, 2019+
|
||||||
|
// License: VNPL-1.1 (see README.md for details)
|
||||||
|
|
||||||
|
const stream = require('stream');
|
||||||
|
|
||||||
|
const vitastor = require('vitastor');
|
||||||
|
|
||||||
|
const VOLUME_MAGIC = 'VstS3Vol';
|
||||||
|
const OBJECT_MAGIC = 'VstS3Obj';
|
||||||
|
const FLAG_DELETED = 2n;
|
||||||
|
|
||||||
|
type Volume = {
|
||||||
|
id: number,
|
||||||
|
partial_sectors: {
|
||||||
|
[key: string]: {
|
||||||
|
buffer: Buffer,
|
||||||
|
refs: number,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
header: {
|
||||||
|
location: string,
|
||||||
|
bucket: string,
|
||||||
|
max_size: number,
|
||||||
|
create_ts: number,
|
||||||
|
used_ts: number,
|
||||||
|
size: number,
|
||||||
|
objects: number,
|
||||||
|
removed_objects: number,
|
||||||
|
object_bytes: number,
|
||||||
|
removed_bytes: number,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
type ObjectHeader = {
|
||||||
|
size: number,
|
||||||
|
key: string,
|
||||||
|
part_num?: number,
|
||||||
|
};
|
||||||
|
|
||||||
|
class VitastorBackend
|
||||||
|
{
|
||||||
|
locationName: string;
|
||||||
|
config: {
|
||||||
|
pool_id: number,
|
||||||
|
metadata_image: string,
|
||||||
|
metadata_pool_id: number,
|
||||||
|
metadata_inode_num: number,
|
||||||
|
size_buckets: number[],
|
||||||
|
size_bucket_mul: number,
|
||||||
|
id_batch_size: number,
|
||||||
|
sector_size: number,
|
||||||
|
write_chunk_size: number,
|
||||||
|
read_chunk_size: number,
|
||||||
|
pack_objects: boolean,
|
||||||
|
// and also other parameters for vitastor itself
|
||||||
|
};
|
||||||
|
next_id: number;
|
||||||
|
alloc_id: number;
|
||||||
|
opened: boolean;
|
||||||
|
on_open: ((...args: any[]) => void)[] | null;
|
||||||
|
open_error: Error | null;
|
||||||
|
cli: any;
|
||||||
|
kv: any;
|
||||||
|
volumes: {
|
||||||
|
[bucket: string]: {
|
||||||
|
[max_size: string]: Volume,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
volumes_by_id: {
|
||||||
|
[id: string]: Volume,
|
||||||
|
};
|
||||||
|
volume_delete_stats: {
|
||||||
|
[id: string]: {
|
||||||
|
count: number,
|
||||||
|
bytes: number,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
constructor(locationName, config)
|
||||||
|
{
|
||||||
|
this.locationName = locationName;
|
||||||
|
this.config = config;
|
||||||
|
// validate config
|
||||||
|
this.config.pool_id = Number(this.config.pool_id) || 0;
|
||||||
|
if (!this.config.pool_id)
|
||||||
|
throw new Error('pool_id is required for Vitastor');
|
||||||
|
if (!this.config.metadata_image && (!this.config.metadata_pool_id || !this.config.metadata_inode_num))
|
||||||
|
throw new Error('metadata_image or metadata_inode is required for Vitastor');
|
||||||
|
if (!this.config.size_buckets || !this.config.size_buckets.length)
|
||||||
|
this.config.size_buckets = [ 32*1024, 128*1024, 512*1024, 2*1024, 8*1024 ];
|
||||||
|
this.config.size_bucket_mul = Number(this.config.size_bucket_mul) || 2;
|
||||||
|
this.config.id_batch_size = Number(this.config.id_batch_size) || 100;
|
||||||
|
this.config.sector_size = Number(this.config.sector_size) || 0;
|
||||||
|
if (this.config.sector_size < 4096)
|
||||||
|
this.config.sector_size = 4096;
|
||||||
|
this.config.write_chunk_size = Number(this.config.write_chunk_size) || 0;
|
||||||
|
if (this.config.write_chunk_size < this.config.sector_size)
|
||||||
|
this.config.write_chunk_size = 4*1024*1024; // 4 MB
|
||||||
|
this.config.read_chunk_size = Number(this.config.read_chunk_size) || 0;
|
||||||
|
if (this.config.read_chunk_size < this.config.sector_size)
|
||||||
|
this.config.read_chunk_size = 4*1024*1024; // 4 MB
|
||||||
|
this.config.pack_objects = !!this.config.pack_objects;
|
||||||
|
// state
|
||||||
|
this.next_id = 1;
|
||||||
|
this.alloc_id = 0;
|
||||||
|
this.opened = false;
|
||||||
|
this.on_open = null;
|
||||||
|
this.open_error = null;
|
||||||
|
this.cli = new vitastor.Client(config);
|
||||||
|
this.kv = new vitastor.KV(this.cli);
|
||||||
|
// we group objects into volumes by bucket and size
|
||||||
|
this.volumes = {};
|
||||||
|
this.volumes_by_id = {};
|
||||||
|
this.volume_delete_stats = {};
|
||||||
|
}
|
||||||
|
|
||||||
|
async _makeVolumeId()
|
||||||
|
{
|
||||||
|
if (this.next_id <= this.alloc_id)
|
||||||
|
{
|
||||||
|
return this.next_id++;
|
||||||
|
}
|
||||||
|
const id_key = 'id'+this.config.pool_id;
|
||||||
|
const [ err, prev ] = await new Promise<[ any, string ]>(ok => this.kv.get(id_key, (err, value) => ok([ err, value ])));
|
||||||
|
if (err && err != vitastor.ENOENT)
|
||||||
|
{
|
||||||
|
throw new Error(err);
|
||||||
|
}
|
||||||
|
const new_id = (parseInt(prev) || 0) + 1;
|
||||||
|
this.next_id = new_id;
|
||||||
|
this.alloc_id = this.next_id + this.config.id_batch_size - 1;
|
||||||
|
await new Promise((ok, no) => this.kv.set(id_key, this.alloc_id, err => (err ? no(new Error(err)) : ok(null)), cas_old => cas_old === prev));
|
||||||
|
return this.next_id;
|
||||||
|
}
|
||||||
|
|
||||||
|
async _getVolume(bucketName, size)
|
||||||
|
{
|
||||||
|
if (!this.opened)
|
||||||
|
{
|
||||||
|
if (this.on_open)
|
||||||
|
{
|
||||||
|
await new Promise(ok => this.on_open!.push(ok));
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
this.on_open = [];
|
||||||
|
if (this.config.metadata_image)
|
||||||
|
{
|
||||||
|
const img = new vitastor.Image(this.cli, this.config.metadata_image);
|
||||||
|
const info = await new Promise<{ pool_id: number, inode_num: number }>(ok => img.get_info(ok));
|
||||||
|
this.config.metadata_pool_id = info.pool_id;
|
||||||
|
this.config.metadata_inode_num = info.inode_num;
|
||||||
|
}
|
||||||
|
const kv_config = {};
|
||||||
|
for (const key in this.config)
|
||||||
|
{
|
||||||
|
if (key.substr(0, 3) === 'kv_')
|
||||||
|
kv_config[key] = this.config[key];
|
||||||
|
}
|
||||||
|
this.open_error = await new Promise(ok => this.kv.open(
|
||||||
|
this.config.metadata_pool_id, this.config.metadata_inode_num,
|
||||||
|
kv_config, err => ok(err ? new Error(err) : null)
|
||||||
|
));
|
||||||
|
this.opened = true;
|
||||||
|
this.on_open.map(cb => setImmediate(cb));
|
||||||
|
this.on_open = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (this.open_error)
|
||||||
|
{
|
||||||
|
throw this.open_error;
|
||||||
|
}
|
||||||
|
let i;
|
||||||
|
for (i = 0; i < this.config.size_buckets.length && size >= this.config.size_buckets[i]; i++) {}
|
||||||
|
let s;
|
||||||
|
if (i < this.config.size_buckets.length)
|
||||||
|
s = this.config.size_buckets[i];
|
||||||
|
else if (this.config.size_bucket_mul > 1)
|
||||||
|
{
|
||||||
|
while (size >= s)
|
||||||
|
s = Math.floor(this.config.size_bucket_mul * s);
|
||||||
|
}
|
||||||
|
if (!this.volumes[bucketName])
|
||||||
|
{
|
||||||
|
this.volumes[bucketName] = {};
|
||||||
|
}
|
||||||
|
if (this.volumes[bucketName][s])
|
||||||
|
{
|
||||||
|
return this.volumes[bucketName][s];
|
||||||
|
}
|
||||||
|
const new_id = await this._makeVolumeId();
|
||||||
|
const new_vol = this.volumes[bucketName][s] = {
|
||||||
|
id: new_id,
|
||||||
|
// FIXME: partial_sectors should be written with CAS because otherwise we may lose quick deletes
|
||||||
|
partial_sectors: {},
|
||||||
|
header: {
|
||||||
|
location: this.locationName,
|
||||||
|
bucket: bucketName,
|
||||||
|
max_size: s,
|
||||||
|
create_ts: Date.now(),
|
||||||
|
used_ts: Date.now(),
|
||||||
|
size: this.config.sector_size, // initial position is right after header
|
||||||
|
objects: 0,
|
||||||
|
removed_objects: 0,
|
||||||
|
object_bytes: 0,
|
||||||
|
removed_bytes: 0,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
this.volumes_by_id[new_id] = new_vol;
|
||||||
|
const header_text = JSON.stringify(this.volumes[bucketName][s].header);
|
||||||
|
const buf = Buffer.alloc(this.config.sector_size);
|
||||||
|
buf.write(VOLUME_MAGIC + header_text, 0);
|
||||||
|
await new Promise((ok, no) => this.cli.write(
|
||||||
|
this.config.pool_id, new_id, 0, buf, err => (err ? no(new Error(err)) : ok(null))
|
||||||
|
));
|
||||||
|
await new Promise((ok, no) => this.kv.set(
|
||||||
|
'vol_'+this.config.pool_id+'_'+new_id, header_text, err => (err ? no(new Error(err)) : ok(null)), cas_old => !cas_old
|
||||||
|
));
|
||||||
|
return new_vol;
|
||||||
|
}
|
||||||
|
|
||||||
|
toObjectGetInfo(objectKey, bucketName, storageLocation)
|
||||||
|
{
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
_bufferStart(vol, cur_pos, cur_size, cur_chunks, sector_refs)
|
||||||
|
{
|
||||||
|
if ((cur_pos % this.config.sector_size) ||
|
||||||
|
Math.floor((cur_pos + cur_size) / this.config.sector_size) == Math.floor(cur_pos / this.config.sector_size))
|
||||||
|
{
|
||||||
|
const sect_pos = Math.floor(cur_pos / this.config.sector_size) * this.config.sector_size;
|
||||||
|
const sect = vol.partial_sectors[sect_pos]
|
||||||
|
? vol.partial_sectors[sect_pos].buffer
|
||||||
|
: Buffer.alloc(this.config.sector_size);
|
||||||
|
if (this.config.pack_objects)
|
||||||
|
{
|
||||||
|
// Save only if <pack_objects>
|
||||||
|
if (!vol.partial_sectors[sect_pos])
|
||||||
|
vol.partial_sectors[sect_pos] = { buffer: sect, refs: 0 };
|
||||||
|
vol.partial_sectors[sect_pos].refs++;
|
||||||
|
sector_refs.push(sect_pos);
|
||||||
|
}
|
||||||
|
let off = cur_pos % this.config.sector_size;
|
||||||
|
let i = 0;
|
||||||
|
for (; i < cur_chunks.length; i++)
|
||||||
|
{
|
||||||
|
let copy_len = this.config.sector_size - off;
|
||||||
|
copy_len = copy_len > cur_chunks[i].length ? cur_chunks[i].length : copy_len;
|
||||||
|
cur_chunks[i].copy(sect, off, 0, copy_len);
|
||||||
|
off += copy_len;
|
||||||
|
if (copy_len < cur_chunks[i].length)
|
||||||
|
{
|
||||||
|
cur_chunks[i] = cur_chunks[i].slice(copy_len);
|
||||||
|
cur_size -= copy_len;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
cur_size -= cur_chunks[i].length;
|
||||||
|
}
|
||||||
|
cur_chunks.splice(0, i, sect);
|
||||||
|
cur_size += this.config.sector_size;
|
||||||
|
cur_pos = sect_pos;
|
||||||
|
}
|
||||||
|
return [ cur_pos, cur_size ];
|
||||||
|
}
|
||||||
|
|
||||||
|
_bufferEnd(vol, cur_pos, cur_size, cur_chunks, sector_refs, write_all)
|
||||||
|
{
|
||||||
|
const write_pos = cur_pos;
|
||||||
|
const write_chunks = cur_chunks;
|
||||||
|
let write_size = cur_size;
|
||||||
|
cur_chunks = [];
|
||||||
|
cur_pos += cur_size;
|
||||||
|
cur_size = 0;
|
||||||
|
let remain = (cur_pos % this.config.sector_size);
|
||||||
|
if (remain > 0)
|
||||||
|
{
|
||||||
|
cur_pos -= remain;
|
||||||
|
let last_sect = null;
|
||||||
|
if (write_all)
|
||||||
|
{
|
||||||
|
last_sect = vol.partial_sectors[cur_pos]
|
||||||
|
? vol.partial_sectors[cur_pos].buffer
|
||||||
|
: Buffer.alloc(this.config.sector_size);
|
||||||
|
if (this.config.pack_objects)
|
||||||
|
{
|
||||||
|
// Save only if <pack_objects>
|
||||||
|
if (!vol.partial_sectors[cur_pos])
|
||||||
|
vol.partial_sectors[cur_pos] = { buffer: last_sect, refs: 0 };
|
||||||
|
vol.partial_sectors[cur_pos].refs++;
|
||||||
|
sector_refs.push(cur_pos);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
write_size -= remain;
|
||||||
|
if (write_size < 0)
|
||||||
|
write_size = 0;
|
||||||
|
for (let i = write_chunks.length-1; i >= 0 && remain > 0; i--)
|
||||||
|
{
|
||||||
|
if (write_chunks[i].length <= remain)
|
||||||
|
{
|
||||||
|
remain -= write_chunks[i].length;
|
||||||
|
if (write_all)
|
||||||
|
write_chunks[i].copy(last_sect, remain);
|
||||||
|
else
|
||||||
|
cur_chunks.unshift(write_chunks[i]);
|
||||||
|
write_chunks.pop();
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
if (write_all)
|
||||||
|
write_chunks[i].copy(last_sect, 0, write_chunks[i].length - remain);
|
||||||
|
else
|
||||||
|
cur_chunks.unshift(write_chunks[i].slice(write_chunks[i].length - remain));
|
||||||
|
write_chunks[i] = write_chunks[i].slice(0, write_chunks[i].length - remain);
|
||||||
|
remain = 0;
|
||||||
|
i++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (write_all)
|
||||||
|
{
|
||||||
|
write_chunks.push(last_sect);
|
||||||
|
write_size += this.config.sector_size;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for (const chunk of cur_chunks)
|
||||||
|
{
|
||||||
|
cur_size += chunk.length;
|
||||||
|
}
|
||||||
|
return [ write_pos, write_chunks, write_size, cur_pos, cur_size, cur_chunks ];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* reqUids: string, // request-ids for log, usually joined by ':'
|
||||||
|
* keyContext: {
|
||||||
|
* // a lot of shit, basically all metadata
|
||||||
|
* bucketName,
|
||||||
|
* objectKey,
|
||||||
|
* owner?,
|
||||||
|
* namespace?,
|
||||||
|
* partNumber?,
|
||||||
|
* uploadId?,
|
||||||
|
* metaHeaders?,
|
||||||
|
* isDeleteMarker?,
|
||||||
|
* tagging?,
|
||||||
|
* contentType?,
|
||||||
|
* cacheControl?,
|
||||||
|
* contentDisposition?,
|
||||||
|
* contentEncoding?,
|
||||||
|
* },
|
||||||
|
* callback: (error, objectGetInfo: any) => void,
|
||||||
|
*/
|
||||||
|
put(stream, size, keyContext, reqUids, callback)
|
||||||
|
{
|
||||||
|
callback = once(callback);
|
||||||
|
this._getVolume(keyContext.bucketName, size)
|
||||||
|
.then(vol => this._put(vol, stream, size, keyContext, reqUids, callback))
|
||||||
|
.catch(callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
_put(vol, stream, size, keyContext, reqUids, callback)
|
||||||
|
{
|
||||||
|
const object_header: ObjectHeader = {
|
||||||
|
size,
|
||||||
|
key: keyContext.objectKey,
|
||||||
|
};
|
||||||
|
if (keyContext.partNumber)
|
||||||
|
{
|
||||||
|
object_header.part_num = keyContext.partNumber;
|
||||||
|
}
|
||||||
|
// header is: <8 bytes magic> <8 bytes flags> <8 bytes json length> <json>
|
||||||
|
const hdr_begin_buf = Buffer.alloc(24);
|
||||||
|
const hdr_json_buf = Buffer.from(JSON.stringify(object_header), 'utf-8');
|
||||||
|
hdr_begin_buf.write(OBJECT_MAGIC);
|
||||||
|
hdr_begin_buf.writeBigInt64LE(BigInt(hdr_json_buf.length), 16);
|
||||||
|
const object_header_buf = Buffer.concat([ hdr_begin_buf, hdr_json_buf ]);
|
||||||
|
const object_pos = vol.header.size;
|
||||||
|
const object_get_info = { volume: vol.id, offset: object_pos, hdrlen: object_header_buf.length, size };
|
||||||
|
let cur_pos = object_pos;
|
||||||
|
let cur_chunks = [ object_header_buf ];
|
||||||
|
let cur_size = object_header_buf.length;
|
||||||
|
let err: Error|null = null;
|
||||||
|
let waiting = 1; // 1 for end or error, 1 for each write request
|
||||||
|
vol.header.size += object_header_buf.length + size;
|
||||||
|
if (!this.config.pack_objects && (vol.header.size % this.config.sector_size))
|
||||||
|
{
|
||||||
|
vol.header.size += this.config.sector_size - (vol.header.size % this.config.sector_size);
|
||||||
|
}
|
||||||
|
const writeChunk = (last) =>
|
||||||
|
{
|
||||||
|
const sector_refs = [];
|
||||||
|
// Handle partial beginning
|
||||||
|
[ cur_pos, cur_size ] = this._bufferStart(vol, cur_pos, cur_size, cur_chunks, sector_refs);
|
||||||
|
// Handle partial end
|
||||||
|
let write_pos, write_chunks, write_size;
|
||||||
|
[ write_pos, write_chunks, write_size, cur_pos, cur_size, cur_chunks ] = this._bufferEnd(vol, cur_pos, cur_size, cur_chunks, sector_refs, last);
|
||||||
|
waiting++;
|
||||||
|
// FIXME: pool_id: maybe it should be stored in volume metadata to allow to migrate volumes?
|
||||||
|
this.cli.write(this.config.pool_id, vol.id, write_pos, write_chunks, (res) =>
|
||||||
|
{
|
||||||
|
for (const sect of sector_refs)
|
||||||
|
{
|
||||||
|
vol.partial_sectors[sect].refs--;
|
||||||
|
if (!vol.partial_sectors[sect].refs &&
|
||||||
|
vol.header.size >= sect+this.config.sector_size)
|
||||||
|
{
|
||||||
|
// Forget partial data when it's not needed anymore
|
||||||
|
delete(vol.partial_sectors[sect]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
waiting--;
|
||||||
|
if (res)
|
||||||
|
{
|
||||||
|
err = new Error(res);
|
||||||
|
waiting--;
|
||||||
|
}
|
||||||
|
if (!waiting)
|
||||||
|
{
|
||||||
|
callback(err, err ? null : object_get_info);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
||||||
|
// Stream data
|
||||||
|
stream.on('error', (e) =>
|
||||||
|
{
|
||||||
|
err = e;
|
||||||
|
waiting--;
|
||||||
|
if (!waiting)
|
||||||
|
{
|
||||||
|
callback(err, null);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
stream.on('end', () =>
|
||||||
|
{
|
||||||
|
if (err)
|
||||||
|
{
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
waiting--;
|
||||||
|
if (cur_size)
|
||||||
|
{
|
||||||
|
// write last chunk
|
||||||
|
writeChunk(true);
|
||||||
|
}
|
||||||
|
if (!waiting)
|
||||||
|
{
|
||||||
|
callback(null, object_get_info);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
stream.on('data', (chunk) =>
|
||||||
|
{
|
||||||
|
if (err)
|
||||||
|
{
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
cur_chunks.push(chunk);
|
||||||
|
cur_size += chunk.length;
|
||||||
|
if (cur_size >= this.config.write_chunk_size)
|
||||||
|
{
|
||||||
|
// got a complete chunk, write it out
|
||||||
|
writeChunk(false);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* objectGetInfo: {
|
||||||
|
* key: { volume, offset, hdrlen, size }, // from put
|
||||||
|
* size,
|
||||||
|
* start,
|
||||||
|
* dataStoreName,
|
||||||
|
* dataStoreETag,
|
||||||
|
* range,
|
||||||
|
* response: ServerResponse,
|
||||||
|
* },
|
||||||
|
* range?: [ start, end ], // like in HTTP - first byte index, last byte index
|
||||||
|
* callback: (error, readStream) => void,
|
||||||
|
*/
|
||||||
|
get(objectGetInfo, range, reqUids, callback)
|
||||||
|
{
|
||||||
|
if (!(objectGetInfo instanceof Object) || !objectGetInfo.key ||
|
||||||
|
!(objectGetInfo.key instanceof Object) || !objectGetInfo.key.volume ||
|
||||||
|
!objectGetInfo.key.offset || !objectGetInfo.key.hdrlen || !objectGetInfo.key.size)
|
||||||
|
{
|
||||||
|
throw new Error('objectGetInfo must be { key: { volume, offset, hdrlen, size } }, but is '+JSON.stringify(objectGetInfo));
|
||||||
|
}
|
||||||
|
const [ start, end ] = range || [];
|
||||||
|
if (start < 0 || end < 0 || end != null && start != null && end < start || start >= objectGetInfo.key.size)
|
||||||
|
{
|
||||||
|
throw new Error('Invalid range: '+start+'-'+end);
|
||||||
|
}
|
||||||
|
let offset = objectGetInfo.key.offset + objectGetInfo.key.hdrlen + (start || 0);
|
||||||
|
let len = objectGetInfo.key.size - (start || 0);
|
||||||
|
if (end)
|
||||||
|
{
|
||||||
|
const len2 = end - (start || 0) + 1;
|
||||||
|
if (len2 < len)
|
||||||
|
len = len2;
|
||||||
|
}
|
||||||
|
callback(null, new VitastorReadStream(this.cli, objectGetInfo.key.volume, offset, len, this.config));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* objectGetInfo: {
|
||||||
|
* key: { volume, offset, hdrlen, size }, // from put
|
||||||
|
* size,
|
||||||
|
* start,
|
||||||
|
* dataStoreName,
|
||||||
|
* dataStoreETag,
|
||||||
|
* range,
|
||||||
|
* response: ServerResponse,
|
||||||
|
* },
|
||||||
|
* callback: (error) => void,
|
||||||
|
*/
|
||||||
|
delete(objectGetInfo, reqUids, callback)
|
||||||
|
{
|
||||||
|
callback = once(callback);
|
||||||
|
this._delete(objectGetInfo, reqUids)
|
||||||
|
.then(callback)
|
||||||
|
.catch(callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
async _delete(objectGetInfo, reqUids)
|
||||||
|
{
|
||||||
|
if (!(objectGetInfo instanceof Object) || !objectGetInfo.key ||
|
||||||
|
!(objectGetInfo.key instanceof Object) || !objectGetInfo.key.volume ||
|
||||||
|
!objectGetInfo.key.offset || !objectGetInfo.key.hdrlen || !objectGetInfo.key.size)
|
||||||
|
{
|
||||||
|
throw new Error('objectGetInfo must be { key: { volume, offset, hdrlen, size } }, but is '+JSON.stringify(objectGetInfo));
|
||||||
|
}
|
||||||
|
const in_sect_pos = (objectGetInfo.key.offset % this.config.sector_size);
|
||||||
|
const sect_pos = objectGetInfo.key.offset - in_sect_pos;
|
||||||
|
const vol = this.volumes_by_id[objectGetInfo.key.volume];
|
||||||
|
if (vol && vol.partial_sectors[sect_pos])
|
||||||
|
{
|
||||||
|
// The sector may still be written to in corner cases
|
||||||
|
const sect = vol.partial_sectors[sect_pos];
|
||||||
|
const flags = sect.buffer.readBigInt64LE(in_sect_pos + 8);
|
||||||
|
if (!(flags & FLAG_DELETED))
|
||||||
|
{
|
||||||
|
const del_stat = this.volume_delete_stats[vol.id] = (this.volume_delete_stats[vol.id] || { count: 0, bytes: 0 });
|
||||||
|
del_stat.count++;
|
||||||
|
del_stat.bytes += objectGetInfo.key.size;
|
||||||
|
sect.buffer.writeBigInt64LE(flags | FLAG_DELETED, in_sect_pos + 8);
|
||||||
|
sect.refs++;
|
||||||
|
const err = await new Promise<any>(ok => this.cli.write(this.config.pool_id, objectGetInfo.key.volume, sect_pos, sect.buffer, ok));
|
||||||
|
sect.refs--;
|
||||||
|
if (err)
|
||||||
|
{
|
||||||
|
sect.buffer.writeBigInt64LE(0n, in_sect_pos + 8);
|
||||||
|
throw new Error(err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
// RMW with CAS
|
||||||
|
const [ err, buf, version ] = await new Promise<[ any, Buffer, bigint ]>(ok => this.cli.read(
|
||||||
|
this.config.pool_id, objectGetInfo.key.volume, sect_pos, this.config.sector_size,
|
||||||
|
(err, buf, version) => ok([ err, buf, version ])
|
||||||
|
));
|
||||||
|
if (err)
|
||||||
|
{
|
||||||
|
throw new Error(err);
|
||||||
|
}
|
||||||
|
// FIXME What if JSON crosses sector boundary? Prevent it if we want to pack objects
|
||||||
|
const magic = buf.slice(in_sect_pos, in_sect_pos+8).toString();
|
||||||
|
const flags = buf.readBigInt64LE(in_sect_pos+8);
|
||||||
|
const json_len = Number(buf.readBigInt64LE(in_sect_pos+16));
|
||||||
|
let json_hdr;
|
||||||
|
if (in_sect_pos+24+json_len <= buf.length)
|
||||||
|
{
|
||||||
|
try
|
||||||
|
{
|
||||||
|
json_hdr = JSON.parse(buf.slice(in_sect_pos+24, in_sect_pos+24+json_len).toString());
|
||||||
|
}
|
||||||
|
catch (e)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (magic !== OBJECT_MAGIC || !json_hdr || json_hdr.size !== objectGetInfo.key.size)
|
||||||
|
{
|
||||||
|
throw new Error(
|
||||||
|
'header of object with size '+objectGetInfo.key.size+
|
||||||
|
' bytes not found in volume '+objectGetInfo.key.volume+' at '+objectGetInfo.key.offset
|
||||||
|
);
|
||||||
|
}
|
||||||
|
else if (!(flags & FLAG_DELETED))
|
||||||
|
{
|
||||||
|
buf.writeBigInt64LE(flags | FLAG_DELETED, in_sect_pos + 8);
|
||||||
|
const err = await new Promise<any>(ok => this.cli.write(this.config.pool_id, objectGetInfo.key.volume, sect_pos, buf, { version: version+1n }, ok));
|
||||||
|
if (err == vitastor.EINTR)
|
||||||
|
{
|
||||||
|
// Retry
|
||||||
|
await this._delete(objectGetInfo, reqUids);
|
||||||
|
}
|
||||||
|
else if (err)
|
||||||
|
{
|
||||||
|
throw new Error(err);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
// FIXME: Write deletion statistics to volumes
|
||||||
|
// FIXME: Implement defragmentation
|
||||||
|
const del_stat = this.volume_delete_stats[objectGetInfo.key.volume] = (this.volume_delete_stats[objectGetInfo.key.volume] || { count: 0, bytes: 0 });
|
||||||
|
del_stat.count++;
|
||||||
|
del_stat.bytes += objectGetInfo.key.size;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* config: full zenko server config,
|
||||||
|
* callback: (error, stats) => void, // stats is the returned statistics in arbitrary format
|
||||||
|
*/
|
||||||
|
getDiskUsage(config, reqUids, callback)
|
||||||
|
{
|
||||||
|
// FIXME: Iterate all volumes and return its sizes and deletion statistics, or maybe just sizes
|
||||||
|
callback(null, {});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class VitastorReadStream extends stream.Readable
|
||||||
|
{
|
||||||
|
constructor(cli, volume_id, offset, len, config, options = undefined)
|
||||||
|
{
|
||||||
|
super(options);
|
||||||
|
this.cli = cli;
|
||||||
|
this.volume_id = volume_id;
|
||||||
|
this.offset = offset;
|
||||||
|
this.end = offset + len;
|
||||||
|
this.pos = offset;
|
||||||
|
this.config = config;
|
||||||
|
this._reading = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
_read(n)
|
||||||
|
{
|
||||||
|
if (this._reading)
|
||||||
|
{
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// FIXME: Validate object header
|
||||||
|
const chunk_size = n && this.config.read_chunk_size < n ? n : this.config.read_chunk_size;
|
||||||
|
const read_offset = this.pos;
|
||||||
|
const round_offset = read_offset - (read_offset % this.config.sector_size);
|
||||||
|
let read_end = this.end <= read_offset+chunk_size ? this.end : read_offset+chunk_size;
|
||||||
|
const round_end = (read_end % this.config.sector_size)
|
||||||
|
? read_end + this.config.sector_size - (read_end % this.config.sector_size)
|
||||||
|
: read_end;
|
||||||
|
if (round_end <= this.end)
|
||||||
|
read_end = round_end;
|
||||||
|
this.pos = read_end;
|
||||||
|
if (read_end <= read_offset)
|
||||||
|
{
|
||||||
|
// EOF
|
||||||
|
this.push(null);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
this._reading = true;
|
||||||
|
this.cli.read(this.config.pool_id, this.volume_id, round_offset, round_end-round_offset, (err, buf, version) =>
|
||||||
|
{
|
||||||
|
this._reading = false;
|
||||||
|
if (err)
|
||||||
|
{
|
||||||
|
this.destroy(new Error(err));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (read_offset != round_offset || round_end != read_end)
|
||||||
|
{
|
||||||
|
buf = buf.subarray(read_offset-round_offset, buf.length-(round_end-read_end));
|
||||||
|
}
|
||||||
|
if (this.push(buf))
|
||||||
|
{
|
||||||
|
this._read(n);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function once(callback)
|
||||||
|
{
|
||||||
|
let called = false;
|
||||||
|
return function()
|
||||||
|
{
|
||||||
|
if (!called)
|
||||||
|
{
|
||||||
|
called = true;
|
||||||
|
callback.apply(null, arguments);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = VitastorBackend;
|
|
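A small sketch of reading back the per-object header that `_put()` writes (the layout comes from the code above: 8-byte magic, 8-byte flags, 8-byte little-endian JSON length, then the JSON itself):

```js
const OBJECT_MAGIC = 'VstS3Obj';
const FLAG_DELETED = 2n;

function parseObjectHeader(buf, pos = 0) {
    const magic = buf.slice(pos, pos + 8).toString();
    if (magic !== OBJECT_MAGIC) {
        throw new Error('not a VitastorBackend object header');
    }
    const flags = buf.readBigInt64LE(pos + 8);
    const jsonLen = Number(buf.readBigInt64LE(pos + 16));
    const header = JSON.parse(buf.slice(pos + 24, pos + 24 + jsonLen).toString());
    // header is { size, key, part_num? }; the deleted flag lives next to it on disk
    return { deleted: Boolean(flags & FLAG_DELETED), header };
}
```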
@ -51,6 +51,36 @@ function _parseListEntries(entries) {
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** _parseLifecycleListEntries - parse the values returned in a lifeycle listing by metadata
|
||||||
|
* @param {object[]} entries - Version or Content entries in a metadata listing
|
||||||
|
* @param {string} entries[].key - metadata key
|
||||||
|
* @param {string} entries[].value - stringified object metadata
|
||||||
|
* @return {object} - mapped array with parsed value or JSON parsing err
|
||||||
|
*/
|
||||||
|
function _parseLifecycleListEntries(entries) {
|
||||||
|
return entries.map(entry => {
|
||||||
|
const tmp = JSON.parse(entry.value);
|
||||||
|
return {
|
||||||
|
key: entry.key,
|
||||||
|
value: {
|
||||||
|
Size: tmp['content-length'],
|
||||||
|
ETag: tmp['content-md5'],
|
||||||
|
VersionId: tmp.versionId,
|
||||||
|
IsNull: tmp.isNull,
|
||||||
|
LastModified: tmp['last-modified'],
|
||||||
|
Owner: {
|
||||||
|
DisplayName: tmp['owner-display-name'],
|
||||||
|
ID: tmp['owner-id'],
|
||||||
|
},
|
||||||
|
StorageClass: tmp['x-amz-storage-class'],
|
||||||
|
tags: tmp.tags,
|
||||||
|
staleDate: tmp.staleDate,
|
||||||
|
dataStoreName: tmp.dataStoreName,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
});
|
||||||
|
}
|
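An illustrative input/output pair for `_parseLifecycleListEntries` (field values are made up; the mapping is the one implemented above):

```js
const entry = {
    key: 'photos/cat.jpg',
    value: JSON.stringify({
        'content-length': 1024,
        'content-md5': 'd41d8cd98f00b204e9800998ecf8427e',
        versionId: '393833343535363030303033',
        isNull: false,
        'last-modified': '2024-01-10T12:00:00.000Z',
        'owner-display-name': 'bart',
        'owner-id': '79a59df900b949e5',
        'x-amz-storage-class': 'STANDARD',
        tags: {},
        dataStoreName: 'us-east-1',
    }),
};
// _parseLifecycleListEntries([entry])[0].value is then:
// { Size: 1024, ETag: 'd41d8cd98f00b204e9800998ecf8427e', VersionId: '3938...',
//   IsNull: false, LastModified: '2024-01-10T12:00:00.000Z',
//   Owner: { DisplayName: 'bart', ID: '79a59df900b949e5' },
//   StorageClass: 'STANDARD', tags: {}, staleDate: undefined, dataStoreName: 'us-east-1' }
```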
||||||
|
|
||||||
/** parseListEntries - parse the values returned in a listing by metadata
|
/** parseListEntries - parse the values returned in a listing by metadata
|
||||||
* @param {object[]} entries - Version or Content entries in a metadata listing
|
* @param {object[]} entries - Version or Content entries in a metadata listing
|
||||||
* @param {string} entries[].key - metadata key
|
* @param {string} entries[].key - metadata key
|
||||||
|
@ -147,6 +177,42 @@ class MetadataWrapper {
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
updateBucketCapabilities(bucketName, bucketMD, capabilityName, capacityField, capability, log, cb) {
|
||||||
|
log.debug('updating bucket capabilities in metadata');
|
||||||
|
// When concurrency update is not supported, we update the whole bucket metadata
|
||||||
|
if (!this.client.putBucketAttributesCapabilities) {
|
||||||
|
return this.updateBucket(bucketName, bucketMD, log, cb);
|
||||||
|
}
|
||||||
|
return this.client.putBucketAttributesCapabilities(bucketName, capabilityName, capacityField, capability,
|
||||||
|
log, err => {
|
||||||
|
if (err) {
|
||||||
|
log.debug('error from metadata', { implName: this.implName,
|
||||||
|
error: err });
|
||||||
|
return cb(err);
|
||||||
|
}
|
||||||
|
log.trace('bucket capabilities updated in metadata');
|
||||||
|
return cb(err);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
deleteBucketCapabilities(bucketName, bucketMD, capabilityName, capacityField, log, cb) {
|
||||||
|
log.debug('deleting bucket capabilities in metadata');
|
||||||
|
// When concurrency update is not supported, we update the whole bucket metadata
|
||||||
|
if (!this.client.deleteBucketAttributesCapability) {
|
||||||
|
return this.updateBucket(bucketName, bucketMD, log, cb);
|
||||||
|
}
|
||||||
|
return this.client.deleteBucketAttributesCapability(bucketName, capabilityName, capacityField,
|
||||||
|
log, err => {
|
||||||
|
if (err) {
|
||||||
|
log.debug('error from metadata', { implName: this.implName,
|
||||||
|
error: err });
|
||||||
|
return cb(err);
|
||||||
|
}
|
||||||
|
log.trace('bucket capabilities deleted in metadata');
|
||||||
|
return cb(err);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
getBucket(bucketName, log, cb) {
|
getBucket(bucketName, log, cb) {
|
||||||
log.debug('getting bucket from metadata');
|
log.debug('getting bucket from metadata');
|
||||||
this.client.getBucketAttributes(bucketName, log, (err, data) => {
|
this.client.getBucketAttributes(bucketName, log, (err, data) => {
|
||||||
|
@ -160,6 +226,19 @@ class MetadataWrapper {
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
getBucketQuota(bucketName, log, cb) {
|
||||||
|
log.debug('getting bucket quota from metadata');
|
||||||
|
this.client.getBucketAttributes(bucketName, log, (err, data) => {
|
||||||
|
if (err) {
|
||||||
|
log.debug('error from metadata', { implName: this.implName,
|
||||||
|
error: err });
|
||||||
|
return cb(err);
|
||||||
|
}
|
||||||
|
const bucketInfo = BucketInfo.fromObj(data);
|
||||||
|
return cb(err, { quota: bucketInfo.getQuota() });
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
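A short usage sketch for the new `getBucketQuota` (bucket name and callback are illustrative):

```js
metadata.getBucketQuota('example-bucket', log, (err, result) => {
    if (err) {
        return callback(err);
    }
    // result.quota comes straight from BucketInfo.getQuota()
    return callback(null, result.quota);
});
```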
||||||
deleteBucket(bucketName, log, cb) {
|
deleteBucket(bucketName, log, cb) {
|
||||||
log.debug('deleting bucket from metadata');
|
log.debug('deleting bucket from metadata');
|
||||||
this.client.deleteBucket(bucketName, log, err => {
|
this.client.deleteBucket(bucketName, log, err => {
|
||||||
|
@ -213,6 +292,25 @@ class MetadataWrapper {
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
getObjectsMD(bucketName, objNamesWithParams, log, cb) {
|
||||||
|
if (typeof this.client.getObjects !== 'function') {
|
||||||
|
log.debug('backend does not support get object metadata with batching', {
|
||||||
|
implName: this.implName,
|
||||||
|
});
|
||||||
|
return cb(errors.NotImplemented);
|
||||||
|
}
|
||||||
|
log.debug('getting objects from metadata', { objects: objNamesWithParams });
|
||||||
|
return this.client.getObjects(bucketName, objNamesWithParams, log, (err, data) => {
|
||||||
|
if (err) {
|
||||||
|
log.debug('error getting objects from metadata', { implName: this.implName, objects: objNamesWithParams,
|
||||||
|
err });
|
||||||
|
return cb(err);
|
||||||
|
}
|
||||||
|
log.debug('objects retrieved from metadata', { objects: objNamesWithParams });
|
||||||
|
return cb(err, data);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
getObjectMD(bucketName, objName, params, log, cb) {
|
getObjectMD(bucketName, objName, params, log, cb) {
|
||||||
log.debug('getting object from metadata');
|
log.debug('getting object from metadata');
|
||||||
this.client.getObject(bucketName, objName, params, log, (err, data) => {
|
this.client.getObject(bucketName, objName, params, log, (err, data) => {
|
||||||
|
@ -226,7 +324,7 @@ class MetadataWrapper {
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
deleteObjectMD(bucketName, objName, params, log, cb) {
|
deleteObjectMD(bucketName, objName, params, log, cb, originOp = 's3:ObjectRemoved:Delete') {
|
||||||
log.debug('deleting object from metadata');
|
log.debug('deleting object from metadata');
|
||||||
this.client.deleteObject(bucketName, objName, params, log, err => {
|
this.client.deleteObject(bucketName, objName, params, log, err => {
|
||||||
if (err) {
|
if (err) {
|
||||||
|
@ -236,7 +334,7 @@ class MetadataWrapper {
|
||||||
}
|
}
|
||||||
log.debug('object deleted from metadata');
|
log.debug('object deleted from metadata');
|
||||||
return cb(err);
|
return cb(err);
|
||||||
});
|
}, originOp);
|
||||||
}
|
}
|
||||||
|
|
||||||
     listObject(bucketName, listingParams, log, cb) {
@@ -279,6 +377,29 @@ class MetadataWrapper {
         });
     }
 
+    listLifecycleObject(bucketName, listingParams, log, cb) {
+        log.debug('getting object listing for lifecycle from metadata');
+        this.client.listLifecycleObject(bucketName, listingParams, log, (err, data) => {
+            if (err) {
+                log.error('error from metadata', { implName: this.implName,
+                    err });
+                return cb(err);
+            }
+            log.debug('object listing for lifecycle retrieved from metadata');
+            // eslint-disable-next-line no-param-reassign
+            data.Contents = parseListEntries(data.Contents, _parseLifecycleListEntries);
+            if (data.Contents instanceof Error) {
+                log.error('error parsing metadata listing for lifecycle', {
+                    error: data.Contents,
+                    listingType: listingParams.listingType,
+                    method: 'listLifecycleObject',
+                });
+                return cb(errors.InternalError);
+            }
+            return cb(null, data);
+        });
+    }
 
     listMultipartUploads(bucketName, listingParams, log, cb) {
         this.client.listMultipartUploads(bucketName, listingParams, log,
             (err, data) => {
@@ -427,6 +548,139 @@ class MetadataWrapper {
             return cb();
         });
     }
 
+    /**
+     * Put bucket indexes
+     *
+     * indexSpec format:
+     * [
+     *      { key: [ { key: "", order: 1 } ... ], name: <id 1>, ... , <backend options> },
+     *      ...
+     *      { key: [ { key: "", order: 1 } ... ], name: <id n>, ... },
+     * ]
+     *
+     * @param {String} bucketName bucket name
+     * @param {Array<Object>} indexSpecs index specification
+     * @param {Object} log logger
+     * @param {Function} cb callback
+     * @return {undefined}
+     */
+    putBucketIndexes(bucketName, indexSpecs, log, cb) {
+        log.debug('put bucket indexes');
+
+        if (typeof this.client.putBucketIndexes !== 'function') {
+            log.error('error from metadata', {
+                method: 'putBucketIndexes',
+                error: errors.NotImplemented,
+                implName: this.implName,
+            });
+            return cb(errors.NotImplemented);
+        }
+
+        return this.client.putBucketIndexes(bucketName, indexSpecs, log, err => {
+            if (err) {
+                log.debug('error from metadata', {
+                    method: 'putBucketIndexes',
+                    error: err,
+                    implName: this.implName,
+                });
+                return cb(err);
+            }
+            return cb(null);
+        });
+    }
+
+    /**
+     * Delete bucket indexes
+     *
+     * indexSpec format:
+     * [
+     *      { key: [ { key: "", order: 1 } ... ], name: <id 1>, ... , <backend options> },
+     *      ...
+     *      { key: [ { key: "", order: 1 } ... ], name: <id n>, ... },
+     * ]
+     *
+     * @param {String} bucketName bucket name
+     * @param {Array<Object>} indexSpecs index specification
+     * @param {Object} log logger
+     * @param {Function} cb callback
+     * @return {undefined}
+     */
+    deleteBucketIndexes(bucketName, indexSpecs, log, cb) {
+        log.debug('delete bucket indexes');
+
+        if (typeof this.client.deleteBucketIndexes !== 'function') {
+            log.error('error from metadata', {
+                method: 'deleteBucketIndexes',
+                error: errors.NotImplemented,
+                implName: this.implName,
+            });
+            return cb(errors.NotImplemented);
+        }
+
+        return this.client.deleteBucketIndexes(bucketName, indexSpecs, log, err => {
+            if (err) {
+                log.error('error from metadata', {
+                    method: 'deleteBucketIndexes',
+                    error: err,
+                    implName: this.implName,
+                });
+                return cb(err);
+            }
+            return cb(null);
+        });
+    }
+
+    getBucketIndexes(bucketName, log, cb) {
+        log.debug('get bucket indexes');
+
+        if (typeof this.client.getBucketIndexes !== 'function') {
+            log.debug('error from metadata', {
+                method: 'getBucketIndexes',
+                error: errors.NotImplemented,
+                implName: this.implName,
+            });
+            return cb(errors.NotImplemented);
+        }
+
+        return this.client.getBucketIndexes(bucketName, log, (err, res) => {
+            if (err) {
+                log.debug('error from metadata', {
+                    method: 'getBucketIndexes',
+                    error: err,
+                    implName: this.implName,
+                });
+                return cb(err);
+            }
+            return cb(null, res);
+        });
+    }
+
+    getIndexingJobs(log, cb) {
+        if (typeof this.client.getIndexingJobs !== 'function') {
+            log.debug('error from metadata', {
+                method: 'getIndexingJobs',
+                error: errors.NotImplemented,
+                implName: this.implName,
+            });
+            return cb(errors.NotImplemented);
+        }
+
+        return this.client.getIndexingJobs(log, (err, res) => {
+            if (err) {
+                log.debug('error from metadata', {
+                    method: 'getIndexingJobs',
+                    error: err,
+                    implName: this.implName,
+                });
+                return cb(err);
+            }
+            return cb(null, res);
+        });
+    }
 }
 
 module.exports = MetadataWrapper;
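A hypothetical index specification matching the documented indexSpec format, to illustrate how putBucketIndexes might be called (the bucket name, index names and indexed fields are made up; backend-specific options vary per implementation):

    const indexSpecs = [
        { name: 'by-last-modified', key: [{ key: 'value.last-modified', order: 1 }] },
        { name: 'by-datastore', key: [{ key: 'value.dataStoreName', order: 1 }, { key: '_id', order: 1 }] },
    ];
    metadata.putBucketIndexes('my-bucket', indexSpecs, log, err => {
        // errors.NotImplemented is returned when the backend exposes no putBucketIndexes()
    });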
@@ -110,6 +110,17 @@ class BucketClientInterface {
         return null;
     }
 
+    listLifecycleObject(bucketName, params, log, cb) {
+        this.client.listObject(bucketName, log.getSerializedUids(), params,
+            (err, data) => {
+                if (err) {
+                    return cb(err);
+                }
+                return cb(null, JSON.parse(data));
+            });
+        return null;
+    }
+
     listMultipartUploads(bucketName, params, log, cb) {
         this.client.listObject(bucketName, log.getSerializedUids(), params,
             (err, data) => {
@@ -325,6 +325,10 @@ class BucketFileInterface {
         return this.internalListObject(bucketName, params, log, cb);
     }
 
+    listLifecycleObject(bucketName, params, log, cb) {
+        return this.internalListObject(bucketName, params, log, cb);
+    }
+
     listMultipartUploads(bucketName, params, log, cb) {
         return this.internalListObject(bucketName, params, log, cb);
     }
@@ -318,6 +318,10 @@ const metastore = {
         });
     },
 
+    listLifecycleObject(bucketName, params, log, cb) {
+        return process.nextTick(cb, errors.NotImplemented);
+    },
+
     listMultipartUploads(bucketName, listingParams, log, cb) {
         process.nextTick(() => {
             metastore.getBucketAttributes(bucketName, log, (err, bucket) => {
File diff suppressed because it is too large
@@ -55,6 +55,22 @@ class MongoReadStream extends Readable {
             }
         }
 
+        if (options.lastModified) {
+            query['value.last-modified'] = {};
+
+            if (options.lastModified.lt) {
+                query['value.last-modified'].$lt = options.lastModified.lt;
+            }
+        }
+
+        if (options.dataStoreName) {
+            query['value.dataStoreName'] = {};
+
+            if (options.dataStoreName.ne) {
+                query['value.dataStoreName'].$ne = options.dataStoreName.ne;
+            }
+        }
+
         if (!Object.keys(query._id).length) {
             delete query._id;
         }
@@ -69,7 +85,8 @@ class MongoReadStream extends Readable {
             Object.assign(query, searchOptions);
         }
 
-        this._cursor = c.find(query).sort({
+        const projection = { 'value.location': 0 };
+        this._cursor = c.find(query, { projection }).sort({
             _id: options.reverse ? -1 : 1,
         });
         if (options.limit && options.limit !== -1) {
@@ -85,15 +102,10 @@ class MongoReadStream extends Readable {
             return;
         }
 
-        this._cursor.next((err, doc) => {
+        this._cursor.next().then(doc => {
             if (this._destroyed) {
                 return;
             }
-            if (err) {
-                this.emit('error', err);
-                return;
-            }
-
             let key = undefined;
             let value = undefined;
 
@@ -117,6 +129,12 @@ class MongoReadStream extends Readable {
                     value,
                 });
             }
+        }).catch(err => {
+            if (this._destroyed) {
+                return;
+            }
+            this.emit('error', err);
+            return;
         });
     }
 
@@ -126,7 +144,7 @@ class MongoReadStream extends Readable {
         }
         this._destroyed = true;
 
-        this._cursor.close(err => {
+        this._cursor.close().catch(err => {
             if (err) {
                 this.emit('error', err);
                 return;
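The cursor changes above follow the newer MongoDB Node.js driver API (the dependency moves to mongodb ^5.2.0 further down in this diff), where FindCursor.next() and close() return Promises instead of taking callbacks. A standalone sketch of the same pattern outside the stream class, with `cursor` and `stream` as assumed placeholders:

    cursor.next()
        .then(doc => {
            if (doc === null) {
                stream.push(null);    // cursor exhausted: signal end of data
                return;
            }
            stream.push({ key: doc._id, value: doc.value });
        })
        .catch(err => stream.emit('error', err));   // errors now surface via rejection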
@@ -185,6 +185,48 @@ function formatVersionKey(key, versionId, vFormat) {
     return formatVersionKeyV0(key, versionId);
 }
 
+function indexFormatMongoArrayToObject(mongoIndexArray) {
+    const indexObj = [];
+
+    for (const idx of mongoIndexArray) {
+        const keys = [];
+        let entries = [];
+
+        if (idx.key instanceof Map) {
+            entries = idx.key.entries();
+        } else {
+            entries = Object.entries(idx.key);
+        }
+
+        for (const k of entries) {
+            keys.push({ key: k[0], order: k[1] });
+        }
+
+        indexObj.push({ name: idx.name, keys });
+    }
+
+    return indexObj;
+}
+
+function indexFormatObjectToMongoArray(indexObj) {
+    const mongoIndexArray = [];
+
+    for (const idx of indexObj) {
+        const key = new Map();
+
+        for (const k of idx.keys) {
+            key.set(k.key, k.order);
+        }
+
+        // copy all fields except keys from idx
+        // eslint-disable-next-line
+        const { keys: _, ...toCopy } = idx;
+        mongoIndexArray.push(Object.assign(toCopy, { name: idx.name, key }));
+    }
+
+    return mongoIndexArray;
+}
+
 module.exports = {
     credPrefix,
@@ -195,4 +237,6 @@ module.exports = {
     translateConditions,
     formatMasterKey,
     formatVersionKey,
+    indexFormatMongoArrayToObject,
+    indexFormatObjectToMongoArray,
 };
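The two helpers are inverses of each other (modulo any extra backend options carried along). For example, a spec in the Arsenal object format converts to the Mongo shape and back; the index name and field are made up:

    const spec = [{ name: 'by-age', keys: [{ key: 'value.age', order: 1 }, { key: '_id', order: 1 }] }];
    const mongoIndexes = indexFormatObjectToMongoArray(spec);
    // -> [{ name: 'by-age', key: Map { 'value.age' => 1, '_id' => 1 } }]
    const roundTrip = indexFormatMongoArrayToObject(mongoIndexes);
    // -> [{ name: 'by-age', keys: [{ key: 'value.age', order: 1 }, { key: '_id', order: 1 }] }]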
@@ -10,21 +10,21 @@ function trySetDirSyncFlag(path) {
 
     const GETFLAGS = 2148034049;
     const SETFLAGS = 1074292226;
-    const FS_DIRSYNC_FL = 65536;
+    const FS_DIRSYNC_FL = 65536n;
     const buffer = Buffer.alloc(8, 0);
     const pathFD = fs.openSync(path, 'r');
     const status = ioctl(pathFD, GETFLAGS, buffer);
     assert.strictEqual(status, 0);
-    const currentFlags = buffer.readUIntLE(0, 8);
+    const currentFlags = buffer.readBigInt64LE(0);
     const flags = currentFlags | FS_DIRSYNC_FL;
-    buffer.writeUIntLE(flags, 0, 8);
+    buffer.writeBigInt64LE(flags, 0);
     const status2 = ioctl(pathFD, SETFLAGS, buffer);
     assert.strictEqual(status2, 0);
     fs.closeSync(pathFD);
     const pathFD2 = fs.openSync(path, 'r');
     const confirmBuffer = Buffer.alloc(8, 0);
     ioctl(pathFD2, GETFLAGS, confirmBuffer);
-    assert.strictEqual(confirmBuffer.readUIntLE(0, 8),
+    assert.strictEqual(confirmBuffer.readBigInt64LE(0),
         currentFlags | FS_DIRSYNC_FL, 'FS_DIRSYNC_FL not set');
     fs.closeSync(pathFD2);
 }
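The switch to BigInt accessors is needed because Buffer.readUIntLE()/writeUIntLE() only accept byte lengths up to 6, so reading the full 8-byte flags word (and OR-ing in FS_DIRSYNC_FL) has to go through the 64-bit API. A minimal sketch of the pattern in isolation:

    const buf = Buffer.alloc(8, 0);
    // buf.readUIntLE(0, 8) would throw: byteLength must be <= 6
    const flags = buf.readBigInt64LE(0) | 65536n;   // FS_DIRSYNC_FL as a BigInt literal
    buf.writeBigInt64LE(flags, 0);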
@@ -3,7 +3,7 @@ import { VersioningConstants } from './constants';
 const VID_SEP = VersioningConstants.VersionId.Separator;
 /**
  * Class for manipulating an object version.
- * The format of a version: { isNull, isDeleteMarker, versionId, otherInfo }
+ * The format of a version: { isNull, isNull2, isDeleteMarker, versionId, otherInfo }
  *
  * @note Some of these functions are optimized based on string search
  * prior to a full JSON parse/stringify. (Vinh: 18K op/s are achieved
@@ -13,24 +13,31 @@ const VID_SEP = VersioningConstants.VersionId.Separator;
 export class Version {
     version: {
         isNull?: boolean;
+        isNull2?: boolean;
         isDeleteMarker?: boolean;
         versionId?: string;
         isPHD?: boolean;
+        nullVersionId?: string;
     };
 
     /**
      * Create a new version instantiation from its data object.
      * @param version - the data object to instantiate
      * @param version.isNull - is a null version
+     * @param version.isNull2 - whether the new version is null or not AND has
+     * been put with a Cloudserver handling null keys (i.e. supporting
+     * S3C-7352)
      * @param version.isDeleteMarker - is a delete marker
      * @param version.versionId - the version id
      * @constructor
      */
     constructor(version?: {
         isNull?: boolean;
+        isNull2?: boolean;
         isDeleteMarker?: boolean;
         versionId?: string;
         isPHD?: boolean;
+        nullVersionId?: string;
     }) {
         this.version = version || {};
     }
@@ -83,6 +90,33 @@ export class Version {
         return `{ "isPHD": true, "versionId": "${versionId}" }`;
     }
 
+    /**
+     * Appends a key-value pair to a JSON object represented as a string. It adds
+     * a comma if the object is not empty (i.e., not just '{}'). It assumes the input
+     * string is formatted as a JSON object.
+     *
+     * @param {string} stringifiedObject The JSON object as a string to which the key-value pair will be appended.
+     * @param {string} key The key to append to the JSON object.
+     * @param {string} value The value associated with the key to append to the JSON object.
+     * @returns {string} The updated JSON object as a string with the new key-value pair appended.
+     * @example
+     * _jsonAppend('{"existingKey":"existingValue"}', 'newKey', 'newValue');
+     * // returns '{"existingKey":"existingValue","newKey":"newValue"}'
+     */
+    static _jsonAppend(stringifiedObject: string, key: string, value: string): string {
+        // stringifiedObject value has the format of '{...}'
+        let index = stringifiedObject.length - 2;
+        while (stringifiedObject.charAt(index) === ' ') {
+            index -= 1;
+        }
+        const needComma = stringifiedObject.charAt(index) !== '{';
+        return (
+            `${stringifiedObject.slice(0, stringifiedObject.length - 1)}` +
+            (needComma ? ',' : '') +
+            `"${key}":"${value}"}`
+        );
+    }
+
     /**
      * Put versionId into an object in the (cheap) way of string manipulation,
      * instead of the more expensive alternative parsing and stringification.
@@ -93,14 +127,32 @@ export class Version {
      */
     static appendVersionId(value: string, versionId: string): string {
         // assuming value has the format of '{...}'
-        let index = value.length - 2;
-        while (value.charAt(index--) === ' ');
-        const comma = value.charAt(index + 1) !== '{';
-        return (
-            `${value.slice(0, value.length - 1)}` + // eslint-disable-line
-            (comma ? ',' : '') +
-            `"versionId":"${versionId}"}`
-        );
+        return Version._jsonAppend(value, 'versionId', versionId);
     }
 
+    /**
+     * Updates or appends a `nullVersionId` property to a JSON-formatted string.
+     * This function first checks if the `nullVersionId` property already exists within the input string.
+     * If it exists, the function updates the `nullVersionId` with the new value provided.
+     * If it does not exist, the function appends a `nullVersionId` property with the provided value.
+     *
+     * @static
+     * @param {string} value - The JSON-formatted string that may already contain a `nullVersionId` property.
+     * @param {string} nullVersionId - The new value for the `nullVersionId` property to be updated or appended.
+     * @returns {string} The updated JSON-formatted string with the new `nullVersionId` value.
+     */
+    static updateOrAppendNullVersionId(value: string, nullVersionId: string): string {
+        // Check if "nullVersionId" already exists in the string
+        const nullVersionIdPattern = /"nullVersionId":"[^"]*"/;
+        const nullVersionIdExists = nullVersionIdPattern.test(value);
+
+        if (nullVersionIdExists) {
+            // Replace the existing nullVersionId with the new one
+            return value.replace(nullVersionIdPattern, `"nullVersionId":"${nullVersionId}"`);
+        } else {
+            // Append nullVersionId
+            return Version._jsonAppend(value, 'nullVersionId', nullVersionId);
+        }
+    }
+
     /**
@@ -121,6 +173,19 @@ export class Version {
         return this.version.isNull ?? false;
     }
 
+    /**
+     * Check if a version is a null version and has
+     * been put with a Cloudserver handling null keys (i.e. supporting
+     * S3C-7352).
+     *
+     * @return - stating if the value is a null version and has
+     * been put with a Cloudserver handling null keys (i.e. supporting
+     * S3C-7352).
+     */
+    isNull2Version(): boolean {
+        return this.version.isNull2 ?? false;
+    }
+
     /**
      * Check if a stringified object is a delete marker.
      *
@@ -190,6 +255,19 @@ export class Version {
         return this;
     }
 
+    /**
+     * Mark that the null version has been put with a Cloudserver handling null keys (i.e. supporting S3C-7352)
+     *
+     * If `isNull2` is set, `isNull` is also set to maintain consistency.
+     * Explicitly setting both avoids misunderstandings and mistakes in future updates or fixes.
+     * @return - the updated version
+     */
+    setNull2Version() {
+        this.version.isNull2 = true;
+        this.version.isNull = true;
+        return this;
+    }
+
     /**
      * Serialize the version.
      *
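A short illustration of how the new helpers compose on stringified metadata (the version ID values below are made up):

    let md = Version.appendVersionId('{"key":"obj"}', '98445230573829999999RG001');
    // -> '{"key":"obj","versionId":"98445230573829999999RG001"}'
    md = Version.updateOrAppendNullVersionId(md, '98445230573829999999RG001');
    // appends "nullVersionId" the first time, replaces it in place on later calls
    const v = new Version({ isNull: true }).setNull2Version();
    // v now carries both isNull and isNull2, marking a null version written by a
    // Cloudserver that understands null keys (S3C-7352)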
@@ -1,6 +1,8 @@
+import { RequestLogger } from 'werelogs';
+
 import errors, { ArsenalError } from '../errors';
 import { Version } from './Version';
-import { generateVersionId as genVID } from './VersionID';
+import { generateVersionId as genVID, getInfVid } from './VersionID';
 import WriteCache from './WriteCache';
 import WriteGatheringManager from './WriteGatheringManager';
 
@@ -22,11 +24,11 @@ function getPrefixUpperBoundary(prefix: string): string {
     return prefix;
 }
 
-function formatVersionKey(key: string, versionId: string) {
+function formatVersionKey(key: string, versionId: string): string {
     return `${key}${VID_SEP}${versionId}`;
 }
 
-function formatCacheKey(db: string, key: string) {
+function formatCacheKey(db: string, key: string): string {
     // using double VID_SEP to make sure the cache key is unique
     return `${db}${VID_SEP}${VID_SEP}${key}`;
 }
@@ -89,8 +91,10 @@ export default class VersioningRequestProcessor {
         callback: (error: ArsenalError | null, data?: any) => void,
     ) {
         const { db, key, options } = request;
+        logger.addDefaultFields({ bucket: db, key, options });
         if (options && options.versionId) {
-            const versionKey = formatVersionKey(key, options.versionId);
+            const keyVersionId = options.versionId === 'null' ? '' : options.versionId;
+            const versionKey = formatVersionKey(key, keyVersionId);
             return this.wgm.get({ db, key: versionKey }, logger, callback);
         }
         return this.wgm.get(request, logger, (err, data) => {
@@ -101,13 +105,82 @@ export default class VersioningRequestProcessor {
             if (!Version.isPHD(data)) {
                 return callback(null, data);
             }
-            logger.debug('master version is a PHD, getting the latest version',
-                { db, key });
+            logger.debug('master version is a PHD, getting the latest version');
             // otherwise, need to search for the latest version
             return this.getByListing(request, logger, callback);
         });
    }
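With this change, a GET with versionId "null" no longer looks up a regular version key: the empty version ID makes formatVersionKey() produce the object's "null key", i.e. `${key}${VID_SEP}` with nothing after the separator. A tiny sketch of the mapping (VID_SEP is shown as '\0' only for illustration; the real separator comes from VersioningConstants):

    const versionKey = formatVersionKey('obj', '1234');   // 'obj\u00001234' - a regular version key
    const nullKey = formatVersionKey('obj', '');           // 'obj\u0000' - the null key, which sorts
                                                            // before every version key of 'obj' in v0 listings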
+    /**
+     * Helper that lists version keys for a certain object key,
+     * sorted by version ID. If a null key exists for this object, it is
+     * sorted at the appropriate position by its internal version ID and
+     * its internal version ID is appended to its key.
+     *
+     * @param {string} db - bucket name
+     * @param {string} key - object key
+     * @param {object} [options] - options object
+     * @param {number} [options.limit] - max version keys returned
+     * (returns all object version keys if not specified)
+     * @param {object} logger - logger of the request
+     * @param {function} callback - callback(err, {object|null} master, {array} versions)
+     *                              master: { key, value }
+     *                              versions: [{ key, value }, ...]
+     * @return {undefined}
+     */
+    listVersionKeys(db, key, options, logger, callback) {
+        const { limit } = options || {};
+        const listingParams: any = {};
+        let nullKeyLength;
+        // include master key in v0 listing
+        listingParams.gte = key;
+        listingParams.lt = `${key}${VID_SEPPLUS}`;
+        if (limit !== undefined) {
+            // may have to skip master + null key, so 2 extra to list in the worst case
+            listingParams.limit = limit + 2;
+        }
+        nullKeyLength = key.length + 1;
+        return this.wgm.list({
+            db,
+            params: listingParams,
+        }, logger, (err, rawVersions) => {
+            if (err) {
+                return callback(err);
+            }
+            if (rawVersions.length === 0) {
+                // object does not have any version key
+                return callback(null, null, []);
+            }
+            let versions = rawVersions;
+            let master;
+            // in v0 there is always a master key before versions
+            master = versions.shift();
+            if (versions.length === 0) {
+                return callback(null, master, []);
+            }
+            const firstItem = versions[0];
+            if (firstItem.key.length === nullKeyLength) {
+                // first version is the null key
+                const nullVersion = Version.from(firstItem.value);
+                const nullVersionKey = formatVersionKey(key, <string> nullVersion.getVersionId());
+                // find null key's natural versioning order in the list
+                let nullPos = versions.findIndex(item => item.key > nullVersionKey);
+                if (nullPos === -1) {
+                    nullPos = versions.length;
+                }
+                // move null key at the correct position and append its real version ID to the key
+                versions = versions.slice(1, nullPos)
+                    .concat([{ key: nullVersionKey, value: firstItem.value, isNullKey: true }])
+                    .concat(versions.slice(nullPos));
+            }
+            if (limit !== undefined) {
+                // truncate versions to 'limit' entries
+                versions.splice(limit);
+            }
+            return callback(null, master, versions);
+        });
+    }
+
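A sketch of the reordering this helper performs, assuming VID_SEP is rendered as '\0' and the null key holds internal version ID 'v2' (all keys and IDs are illustrative):

    // raw v0 listing:    [ 'obj' (master), 'obj\0' (null key), 'obj\0v1', 'obj\0v3' ]
    // the null key expands to 'obj\0v2' and is spliced at its natural sort position:
    // returned versions: [ { key: 'obj\0v1' }, { key: 'obj\0v2', isNullKey: true }, { key: 'obj\0v3' } ]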
     /**
      * Get the latest version of an object when the master version is a place
      * holder for deletion. For any given pair of db and key, only a
@@ -132,39 +205,39 @@ export default class VersioningRequestProcessor {
         if (!this.enqueueGet(request, logger, callback)) {
             return null;
         }
-        logger.info('start listing latest versions', { request });
+        logger.info('start listing latest versions');
         // otherwise, search for the latest version
         const cacheKey = formatCacheKey(request.db, request.key);
         clearTimeout(this.repairing[cacheKey]);
         delete this.repairing[cacheKey];
-        const req = { db: request.db, params: {
-            gte: request.key, lt: `${request.key}${VID_SEPPLUS}`, limit: 2 } };
-        return this.wgm.list(req, logger, (err, list) => {
-            logger.info('listing latest versions done', { err, list });
+        return this.listVersionKeys(request.db, request.key, {
+            limit: 1,
+        }, logger, (err, master, versions) => {
+            logger.info('listing latest versions done', { err, master, versions });
             if (err) {
                 return this.dequeueGet(request, err);
             }
-            // the complete list of versions is always: mst, v1, v2, ...
-            if (list.length === 0) {
+            if (!master) {
                 return this.dequeueGet(request, errors.ObjNotFound);
             }
-            if (!Version.isPHD(list[0].value)) {
-                return this.dequeueGet(request, null, list[0].value);
+            if (!Version.isPHD(master.value)) {
+                return this.dequeueGet(request, null, master.value);
             }
-            if (list.length === 1) {
-                logger.info('no other versions', { request });
+            if (versions.length === 0) {
+                logger.info('no other versions');
                 this.dequeueGet(request, errors.ObjNotFound);
                 return this.repairMaster(request, logger,
-                    { type: 'del',
-                        value: list[0].value });
+                    { type: 'del', value: master.value });
             }
             // need repair
-            logger.info('update master by the latest version', { request });
-            const nextValue = list[1].value;
-            this.dequeueGet(request, null, nextValue);
+            logger.info('update master by the latest version');
+            const next = {
+                value: versions[0].value,
+                isNullKey: versions[0].isNullKey,
+            };
+            this.dequeueGet(request, null, next.value);
             return this.repairMaster(request, logger,
-                { type: 'put', value: list[0].value,
-                    nextValue });
+                { type: 'put', value: master.value, next });
         });
     }
@@ -227,42 +300,60 @@ export default class VersioningRequestProcessor {
      * RepdConnection format { db, key
      * [, value][, type], method, options }
      * @param logger - logger
-     * @param hints - storing repairing hints
-     * @param hints.type - type of repair operation ('put' or 'del')
-     * @param hints.value - existing value of the master version (PHD)
-     * @param hints.nextValue - the suggested latest version
-              (for 'put')
+     * @param {object} data - storing repairing hints
+     * @param {string} data.value - existing value of the master version (PHD)
+     * @param {object} data.next - the suggested latest version
+     * @param {string} data.next.value - the suggested latest version value
+     * @param {boolean} data.next.isNullKey - whether the suggested
+     * latest version is a null key
      * @return - to finish the call
      */
-    repairMaster(request: any, logger: RequestLogger, hints: {
+    repairMaster(request: any, logger: RequestLogger, data: {
         type: 'put' | 'del';
         value: string;
-        nextValue?: string;
+        next?: {
+            value: string;
+            isNullKey: boolean;
+        };
     }) {
         const { db, key } = request;
-        logger.info('start repair process', { request });
+        logger.info('start repair process');
         this.writeCache.get({ db, key }, logger, (err, value) => {
             // error or the new version is not a place holder for deletion
             if (err) {
-                return logger.info('error repairing', { request, error: err });
+                if (err.is.ObjNotFound) {
+                    return logger.debug('did not repair master: PHD was deleted');
+                } else {
+                    return logger.error('error repairing', { error: err });
+                }
             }
             if (!Version.isPHD(value)) {
-                return logger.debug('master is updated already', { request });
+                return logger.debug('master is updated already');
             }
             // the latest version is the same place holder for deletion
-            if (hints.value === value) {
+            if (data.value === value) {
                 // update the latest version with the next version
+                const ops: any = [];
+                if (data.next) {
+                    ops.push({ key, value: data.next.value });
+                    // cleanup the null key if it is the new master
+                    if (data.next.isNullKey) {
+                        ops.push({ key: formatVersionKey(key, ''), type: 'del' });
+                    }
+                } else {
+                    ops.push({ key, type: 'del' });
+                }
                 const repairRequest = {
                     db,
-                    array: [
-                        { type: hints.type, key, value: hints.nextValue },
-                    ] };
+                    array: ops,
+                };
                 logger.info('replicate repair request', { repairRequest });
                 return this.writeCache.batch(repairRequest, logger, () => {});
             }
             // The latest version is an updated place holder for deletion,
             // repeat the repair process from listing for latest versions.
             // The queue will ensure single repair process at any moment.
+            logger.info('latest version is an updated PHD');
             return this.getByListing(request, logger, () => {});
         });
     }
@@ -284,6 +375,7 @@ export default class VersioningRequestProcessor {
         callback: (error: ArsenalError | null, data?: any) => void,
     ) {
         const { db, key, value, options } = request;
+        logger.addDefaultFields({ bucket: db, key, options });
         // valid combinations of versioning options:
         // - !versioning && !versionId: normal non-versioning put
         // - versioning && !versionId: create a new version
@@ -337,6 +429,7 @@ export default class VersioningRequestProcessor {
         versionId: string,
     ) => void,
     ) {
+        logger.info('process new version put');
         // making a new versionId and a new version key
         const versionId = this.generateVersionId();
         const versionKey = formatVersionKey(request.key, versionId);
@@ -365,12 +458,22 @@ export default class VersioningRequestProcessor {
         logger: RequestLogger,
         callback: (err: ArsenalError | null, data?: any, versionId?: string) => void,
     ) {
+        logger.info('process version specific put');
         const { db, key } = request;
         // versionId is empty: update the master version
         if (request.options.versionId === '') {
             const versionId = this.generateVersionId();
             const value = Version.appendVersionId(request.value, versionId);
-            return callback(null, [{ key, value }], versionId);
+            const ops: any = [{ key, value }];
+            if (request.options.deleteNullKey) {
+                const nullKey = formatVersionKey(key, '');
+                ops.push({ key: nullKey, type: 'del' });
+            }
+            return callback(null, ops, versionId);
+        }
+        if (request.options.versionId === 'null') {
+            const nullKey = formatVersionKey(key, '');
+            return callback(null, [{ key: nullKey, value: request.value }], 'null');
         }
         // need to get the master version to check if this is the master version
         this.writeCache.get({ db, key }, logger, (err, data) => {
@@ -378,14 +481,115 @@ export default class VersioningRequestProcessor {
                 return callback(err);
             }
             const versionId = request.options.versionId;
-            const versionKey = formatVersionKey(request.key, versionId);
-            const ops = [{ key: versionKey, value: request.value }];
-            if (data === undefined ||
-                (Version.from(data).getVersionId() ?? '') >= versionId) {
-                // master does not exist or is not newer than put
-                // version and needs to be updated as well.
-                // Note that older versions have a greater version ID.
-                ops.push({ key: request.key, value: request.value });
+            const versionKey = formatVersionKey(key, versionId);
+            const ops: any = [];
+            const masterVersion = data !== undefined &&
+                  Version.from(data);
+            // push a version key if we're not updating the null
+            // version (or in legacy Cloudservers not sending the
+            // 'isNull' parameter, but this has an issue, see S3C-7526)
+            if (request.options.isNull !== true) {
+                const versionOp = { key: versionKey, value: request.value };
+                ops.push(versionOp);
+            }
+            if (masterVersion) {
+                // master key exists
+                // note that older versions have a greater version ID
+                const versionIdFromMaster = masterVersion.getVersionId();
+                if (versionIdFromMaster === undefined ||
+                    versionIdFromMaster >= versionId) {
+                    let value = request.value;
+                    logger.debug('version to put is not older than master');
+                    // Delete the deprecated, null key for backward compatibility
+                    // to avoid storing both deprecated and new null keys.
+                    // If master null version was put with an older Cloudserver (or in compat mode),
+                    // there is a possibility that it also has a null versioned key
+                    // associated, so we need to delete it as we write the null key.
+                    // Deprecated null key gets deleted when the new CloudServer:
+                    // - updates metadata of a null master (options.isNull=true)
+                    // - puts metadata on top of a master null key (options.isNull=false)
+                    if (request.options.isNull !== undefined && // new null key behavior when isNull is defined.
+                        masterVersion.isNullVersion() && // master is null
+                        !masterVersion.isNull2Version()) { // master does not support the new null key behavior yet.
+                        const masterNullVersionId = masterVersion.getVersionId();
+                        // The deprecated null key is referenced in the "versionId" property of the master key.
+                        if (masterNullVersionId) {
+                            const oldNullVersionKey = formatVersionKey(key, masterNullVersionId);
+                            ops.push({ key: oldNullVersionKey, type: 'del' });
+                        }
+                    }
+                    // new behavior when isNull is defined is to only
+                    // update the master key if it is the latest
+                    // version, old behavior needs to copy master to
+                    // the null version because older Cloudservers
+                    // rely on version-specific PUT to copy master
+                    // contents to a new null version key (newer ones
+                    // use special versionId="null" requests for this
+                    // purpose).
+                    if (versionIdFromMaster !== versionId ||
+                        request.options.isNull === undefined) {
+                        // master key is strictly older than the put version
+                        let masterVersionId;
+                        if (masterVersion.isNullVersion() && versionIdFromMaster) {
+                            logger.debug('master key is a null version');
+                            masterVersionId = versionIdFromMaster;
+                        } else if (versionIdFromMaster === undefined) {
+                            logger.debug('master key is nonversioned');
+                            // master key does not have a versionID
+                            // => create one with the "infinite" version ID
+                            masterVersionId = getInfVid(this.replicationGroupId);
+                            masterVersion.setVersionId(masterVersionId);
+                        } else {
+                            logger.debug('master key is a regular version');
+                        }
+                        if (request.options.isNull === true) {
+                            if (!masterVersionId) {
+                                // master is a regular version: delete the null key that
+                                // may exist (older null version)
+                                logger.debug('delete null key');
+                                const nullKey = formatVersionKey(key, '');
+                                ops.push({ key: nullKey, type: 'del' });
+                            }
+                        } else if (masterVersionId) {
+                            logger.debug('create version key from master version');
+                            // isNull === false means Cloudserver supports null keys,
+                            // so create a null key in this case, and a version key otherwise
+                            const masterKeyVersionId = request.options.isNull === false ?
+                                '' : masterVersionId;
+                            const masterVersionKey = formatVersionKey(key, masterKeyVersionId);
+                            masterVersion.setNullVersion();
+                            // isNull === false means Cloudserver supports null keys,
+                            // so create a null key with the isNull2 flag
+                            if (request.options.isNull === false) {
+                                masterVersion.setNull2Version();
+                            // else isNull === undefined means Cloudserver does not support null keys,
+                            // and versionIdFromMaster !== versionId means that a version is PUT on top of a null version
+                            // hence set/update the new master nullVersionId for backward compatibility
+                            } else if (versionIdFromMaster !== versionId) {
+                                // => set the nullVersionId to the master version if put version on top of null version.
+                                value = Version.updateOrAppendNullVersionId(request.value, masterVersionId);
+                            }
+                            ops.push({ key: masterVersionKey,
+                                       value: masterVersion.toString() });
+                        }
+                    } else {
+                        logger.debug('version to put is the master');
+                    }
+                    ops.push({ key, value: value });
+                } else {
+                    logger.debug('version to put is older than master');
+                    if (request.options.isNull === true && !masterVersion.isNullVersion()) {
+                        logger.debug('create or update null key');
+                        const nullKey = formatVersionKey(key, '');
+                        const nullKeyOp = { key: nullKey, value: request.value };
+                        ops.push(nullKeyOp);
+                        // for backward compatibility: remove null version key
+                        ops.push({ key: versionKey, type: 'del' });
+                    }
+                }
+            } else {
+                // master key does not exist: create it
+                ops.push({ key, value: request.value });
             }
             return callback(null, ops, versionId);
         });
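To make the branching easier to follow, here are the batch operations this function would emit in two representative situations, derived by tracing the logic above rather than from test output (keys shown with '\0' for VID_SEP, all IDs illustrative):

    // 1) Null-key-aware Cloudserver updates a null master that already carries isNull2
    //    (options = { versionId: 'vnull', isNull: true }, master versionId === 'vnull'):
    //    ops = [ { key: 'obj', value } ]                     // master rewritten, no separate version key
    // 2) Versioned put on top of a non-versioned master
    //    (options = { versionId: 'v1' }, master has no versionId, isNull undefined):
    //    ops = [ { key: 'obj\0v1', value },                  // the new version key
    //            { key: 'obj\0<inf-vid>', value: master },   // master copied under the "infinite" version ID,
    //                                                        // marked as a null version
    //            { key: 'obj', value-with-nullVersionId } ]  // master updated, nullVersionId pointing at <inf-vid>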
@@ -399,8 +603,10 @@ export default class VersioningRequestProcessor {
         callback: (err: ArsenalError | null, data?: any) => void,
     ) {
         const { db, key, options } = request;
+        logger.addDefaultFields({ bucket: db, key, options });
         // no versioning or versioning configuration off
         if (!(options && options.versionId)) {
+            logger.info('process non-versioned delete');
             return this.writeCache.batch({ db,
                 array: [{ key, type: 'del' }] },
             logger, callback);
@@ -438,7 +644,12 @@ export default class VersioningRequestProcessor {
         versionId?: string,
     ) => void,
     ) {
+        logger.info('process version specific delete');
         const { db, key, options } = request;
+        if (options.versionId === 'null') {
+            const nullKey = formatVersionKey(key, '');
+            return callback(null, [{ key: nullKey, type: 'del' }], 'null');
+        }
         // deleting a specific version
         this.writeCache.get({ db, key }, logger, (err, data) => {
             if (err && !err.is.ObjNotFound) {
@@ -446,7 +657,8 @@ export default class VersioningRequestProcessor {
             }
             // delete the specific version
             const versionId = options.versionId;
-            const versionKey = formatVersionKey(key, versionId);
+            const keyVersionId = options.isNull ? '' : versionId;
+            const versionKey = formatVersionKey(key, keyVersionId);
             const ops: any = [{ key: versionKey, type: 'del' }];
             // update the master version as PHD if it is the deleting version
             if (Version.isPHD(data) ||
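The new early-return path means a delete with versionId "null" maps directly onto the null key, without consulting the master first. A minimal trace:

    // options = { versionId: 'null' }
    // -> callback(null, [{ key: formatVersionKey(key, ''), type: 'del' }], 'null')
    // i.e. a single batch op deleting 'obj\0', the object's null key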
@@ -1,3 +1,5 @@
+import { RequestLogger } from 'werelogs';
+
 import errors, { ArsenalError } from '../errors';
 import WriteGatheringManager from './WriteGatheringManager';
 
@@ -1,3 +1,5 @@
+import { RequestLogger } from 'werelogs';
+
 import { ArsenalError } from '../errors';
 
 const WG_TIMEOUT = 5; // batching period in milliseconds
 
package.json (69 lines changed)

@@ -3,7 +3,7 @@
     "engines": {
         "node": ">=16"
     },
-    "version": "8.1.83",
+    "version": "8.1.134",
     "description": "Common utilities for the S3 project components",
     "main": "build/index.js",
     "repository": {
@@ -19,39 +19,38 @@
     "dependencies": {
         "@azure/identity": "^3.1.1",
         "@azure/storage-blob": "^12.12.0",
-        "@types/async": "^3.2.12",
-        "@types/utf8": "^3.0.1",
-        "JSONStream": "^1.0.0",
+        "@js-sdsl/ordered-set": "^4.4.2",
+        "@swc/cli": "^0.4.0",
+        "@swc/core": "^1.7.4",
         "agentkeepalive": "^4.1.3",
-        "ajv": "6.12.3",
-        "async": "~2.6.4",
+        "ajv": "^6.12.3",
+        "async": "^2.6.4",
         "aws-sdk": "^2.1005.0",
         "backo": "^1.1.0",
-        "base-x": "3.0.8",
-        "base62": "2.0.1",
-        "bson": "4.0.0",
-        "debug": "~4.1.0",
+        "base-x": "^3.0.8",
+        "base62": "^2.0.1",
+        "bson": "^4.0.0",
+        "debug": "^4.1.0",
         "diskusage": "^1.1.1",
-        "fcntl": "github:scality/node-fcntl#0.2.0",
-        "hdclient": "scality/hdclient#1.1.5",
-        "httpagent": "scality/httpagent#1.0.6",
+        "fcntl": "git+https://git.yourcmc.ru/vitalif/zenko-fcntl.git",
+        "httpagent": "git+https://git.yourcmc.ru/vitalif/zenko-httpagent.git#development/1.0",
         "https-proxy-agent": "^2.2.0",
         "ioredis": "^4.28.5",
-        "ipaddr.js": "1.9.1",
+        "ipaddr.js": "^1.9.1",
         "joi": "^17.6.0",
-        "level": "~5.0.1",
-        "level-sublevel": "~6.6.5",
-        "mongodb": "^3.0.1",
+        "JSONStream": "^1.0.0",
+        "level": "^5.0.1",
+        "level-sublevel": "^6.6.5",
+        "mongodb": "^5.2.0",
         "node-forge": "^1.3.0",
-        "prom-client": "10.2.3",
+        "prom-client": "^14.2.0",
         "simple-glob": "^0.2.0",
-        "socket.io": "2.4.1",
-        "socket.io-client": "2.4.0",
-        "sproxydclient": "scality/sproxydclient#8.0.7",
-        "utf8": "3.0.0",
+        "socket.io": "^4.6.1",
+        "socket.io-client": "^4.6.1",
+        "utf8": "^3.0.0",
         "uuid": "^3.0.1",
-        "werelogs": "scality/werelogs#8.1.2",
-        "xml2js": "~0.4.23"
+        "werelogs": "git+https://git.yourcmc.ru/vitalif/zenko-werelogs.git#development/8.1",
+        "xml2js": "^0.4.23"
     },
     "optionalDependencies": {
         "ioctl": "^2.0.2"
@@ -60,22 +59,24 @@
         "@babel/preset-env": "^7.16.11",
         "@babel/preset-typescript": "^7.16.7",
         "@sinonjs/fake-timers": "^6.0.1",
+        "@types/async": "^3.2.12",
+        "@types/utf8": "^3.0.1",
         "@types/ioredis": "^4.28.10",
         "@types/jest": "^27.4.1",
-        "@types/node": "^17.0.21",
+        "@types/node": "^18.19.41",
         "@types/xml2js": "^0.4.11",
-        "eslint": "^8.12.0",
-        "eslint-config-airbnb": "6.2.0",
-        "eslint-config-scality": "scality/Guidelines#ec33dfb",
+        "eslint": "^8.14.0",
+        "eslint-config-airbnb-base": "^15.0.0",
+        "eslint-config-scality": "git+https://git.yourcmc.ru/vitalif/zenko-eslint-config-scality.git",
         "eslint-plugin-react": "^4.3.0",
         "jest": "^27.5.1",
-        "mongodb-memory-server": "^6.0.2",
+        "mongodb-memory-server": "^8.12.2",
         "nyc": "^15.1.0",
         "sinon": "^9.0.2",
-        "temp": "0.9.1",
+        "temp": "^0.9.1",
         "ts-jest": "^27.1.3",
         "ts-node": "^10.6.0",
-        "typescript": "^4.6.2"
+        "typescript": "^4.9.5"
     },
     "scripts": {
         "lint": "eslint $(git ls-files '*.js')",
@@ -83,9 +84,11 @@
         "lint_yml": "yamllint $(git ls-files '*.yml')",
         "test": "jest tests/unit",
         "build": "tsc",
-        "prepare": "yarn build",
+        "prepack": "tsc",
+        "postinstall": "[ -d build ] || swc -d build --copy-files package.json index.ts lib",
         "ft_test": "jest tests/functional --testTimeout=120000 --forceExit",
-        "coverage": "nyc --clean jest tests --coverage --testTimeout=120000 --forceExit"
+        "coverage": "nyc --clean jest tests --coverage --testTimeout=120000 --forceExit",
+        "build_doc": "cd documentation/listingAlgos/pics; dot -Tsvg delimiterStateChart.dot > delimiterStateChart.svg; dot -Tsvg delimiterMasterV0StateChart.dot > delimiterMasterV0StateChart.svg; dot -Tsvg delimiterVersionsStateChart.dot > delimiterVersionsStateChart.svg"
     },
     "private": true,
     "jest": {
@@ -0,0 +1,356 @@
+const async = require('async');
+const assert = require('assert');
+const cluster = require('cluster');
+const http = require('http');
+
+const errors = require('../../../build/lib/errors').default;
+
+const {
+    setupRPCPrimary,
+    setupRPCWorker,
+    sendWorkerCommand,
+    getPendingCommandsCount,
+} = require('../../../build/lib/clustering/ClusterRPC');
+
+/* eslint-disable prefer-const */
+let SERVER_PORT;
+let N_WORKERS;
+/* eslint-enable prefer-const */
+
+/* eslint-disable no-console */
+
+function genUIDS() {
+    return Math.trunc(Math.random() * 0x10000).toString(16);
+}
+
+// for testing robustness: regularly pollute the message channel with
+// unrelated IPC messages
+function sendPollutionMessage(message) {
+    if (cluster.isPrimary) {
+        const randomWorker = Math.trunc(Math.random() * cluster.workers.length);
+        const worker = cluster.workers[randomWorker];
+        if (worker) {
+            worker.send(message);
+        }
+    } else {
+        process.send(message);
+    }
+}
+const ipcPolluterIntervals = [
+    setInterval(
+        () => sendPollutionMessage('string pollution'), 1500),
+    setInterval(
+        () => sendPollutionMessage({ pollution: 'bar' }), 2321),
+    setInterval(
+        () => sendPollutionMessage({ type: 'pollution', foo: { bar: 'baz' } }), 2777),
+];
+
+function someTestHandlerFunc(payload, uids, callback) {
+    setTimeout(() => callback(null, { someResponsePayload: 'bar' }), 10);
+}
+
+function testHandlerWithFailureFunc(payload, uids, callback) {
+    setTimeout(() => {
+        // exactly one of the workers fails to execute this command
+        if (cluster.worker.id === 1) {
+            callback(errors.ServiceFailure);
+        } else {
+            callback(null, { someResponsePayload: 'bar' });
+        }
+    }, 10);
+}
+
+const rpcHandlers = {
+    SomeTestHandler: someTestHandlerFunc,
+    TestHandlerWithFailure: testHandlerWithFailureFunc,
+    TestHandlerWithNoResponse: () => {},
+};
+
+const primaryHandlers = {
+    echoHandler: (worker, payload, uids, callback) => {
+        callback(null, { workerId: worker.id, payload, uids });
+    },
+    errorWithHttpCodeHandler: (_worker, _payload, _uids, callback) => {
+        callback({ name: 'ErrorMock', code: 418, message: 'An error message from primary' });
+    },
+};
+
+function respondOnTestFailure(message, error, results) {
+    console.error('After sendWorkerCommand() resolve/reject: ' +
+        `${message}, error=${error}, results=${JSON.stringify(results)}`);
+    console.trace();
+    throw errors.InternalError;
+}
+
+async function successfulCommandTestGeneric(nWorkers) {
+    try {
+        const results = await sendWorkerCommand('*', 'SomeTestHandler', genUIDS(), {});
+        if (results.length !== nWorkers) {
+            return respondOnTestFailure(
+                `expected ${nWorkers} worker results, got ${results.length}`,
+                null, results);
+        }
+        for (const result of results) {
+            if (typeof result !== 'object' || result === null) {
+                return respondOnTestFailure('not all results are objects', null, results);
+            }
+            if (result.error !== null) {
+                return respondOnTestFailure(
+                    'one or more workers had an unexpected error',
+                    null, results);
+            }
+            if (typeof result.result !== 'object' || result.result === null) {
+                return respondOnTestFailure(
+                    'one or more workers did not return a result object',
+                    null, results);
+            }
+            if (result.result.someResponsePayload !== 'bar') {
+                return respondOnTestFailure(
+                    'one or more workers did not return the expected payload',
+                    null, results);
+            }
+        }
+        return undefined;
+    } catch (err) {
+        return respondOnTestFailure(`returned unexpected error ${err}`, err, null);
+    }
+}
+
+async function successfulCommandTest() {
+    return successfulCommandTestGeneric(N_WORKERS);
+}
+
+async function successfulCommandWithExtraWorkerTest() {
+    return successfulCommandTestGeneric(N_WORKERS + 1);
+}
+
+async function unsupportedToWorkersTest() {
+    try {
+        const results = await sendWorkerCommand('badToWorkers', 'SomeTestHandler', genUIDS(), {});
+        return respondOnTestFailure('expected an error', null, results);
+    } catch (err) {
+        if (!err.is.NotImplemented) {
+            return respondOnTestFailure('expected a NotImplemented error', err, null);
+        }
+        return undefined;
+    }
+}
+
+async function unsupportedHandlerTest() {
+    try {
+        const results = await sendWorkerCommand('*', 'AWrongTestHandler', genUIDS(), {});
+        if (results.length !== N_WORKERS) {
+            return respondOnTestFailure(
+                `expected ${N_WORKERS} worker results, got ${results.length}`,
+                null, results);
+        }
+        for (const result of results) {
+            if (typeof result !== 'object' || result === null) {
+                return respondOnTestFailure('not all results are objects', null, results);
+            }
+            if (result.error === null || !result.error.is.NotImplemented) {
+                return respondOnTestFailure(
+                    'one or more workers did not return the expected NotImplemented error',
+                    null, results);
+            }
+        }
+        return undefined;
+    } catch (err) {
+        return respondOnTestFailure(`returned unexpected error ${err}`, err, null);
+    }
+}
+
+async function missingUidsTest() {
+    try {
+        const results = await sendWorkerCommand('*', 'SomeTestHandler', undefined, {});
+        return respondOnTestFailure('expected an error', null, results);
+    } catch (err) {
+        if (!err.is.MissingParameter) {
+            return respondOnTestFailure('expected a MissingParameter error', err, null);
+        }
+        return undefined;
+    }
+}
+
+async function duplicateUidsTest() {
+    const dupUIDS = genUIDS();
+    const promises = [
+        sendWorkerCommand('*', 'SomeTestHandler', dupUIDS, {}),
+        sendWorkerCommand('*', 'SomeTestHandler', dupUIDS, {}),
+    ];
+    const results = await Promise.allSettled(promises);
|
||||||
|
if (results[1].status !== 'rejected') {
|
||||||
|
return respondOnTestFailure('expected an error from the second call', null, null);
|
||||||
|
}
|
||||||
|
if (!results[1].reason.is.OperationAborted) {
|
||||||
|
return respondOnTestFailure(
|
||||||
|
'expected a OperationAborted error', results[1].reason, null);
|
||||||
|
}
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function unsuccessfulWorkerTest() {
|
||||||
|
try {
|
||||||
|
const results = await sendWorkerCommand('*', 'TestHandlerWithFailure', genUIDS(), {});
|
||||||
|
if (results.length !== N_WORKERS) {
|
||||||
|
return respondOnTestFailure(
|
||||||
|
`expected ${N_WORKERS} worker results, got ${results.length}`,
|
||||||
|
null, results);
|
||||||
|
}
|
||||||
|
const nServiceFailures = results.filter(result => (
|
||||||
|
result.error && result.error.is.ServiceFailure
|
||||||
|
)).length;
|
||||||
|
if (nServiceFailures !== 1) {
|
||||||
|
return respondOnTestFailure(
|
||||||
|
'expected exactly one worker result to be ServiceFailure error',
|
||||||
|
null, results);
|
||||||
|
}
|
||||||
|
return undefined;
|
||||||
|
} catch (err) {
|
||||||
|
return respondOnTestFailure(`returned unexpected error ${err}`, err, null);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function workerTimeoutTest() {
|
||||||
|
try {
|
||||||
|
const results = await sendWorkerCommand(
|
||||||
|
'*', 'TestHandlerWithNoResponse', genUIDS(), {}, 1000);
|
||||||
|
return respondOnTestFailure('expected an error', null, results);
|
||||||
|
} catch (err) {
|
||||||
|
if (!err.is.RequestTimeout) {
|
||||||
|
return respondOnTestFailure('expected a RequestTimeout error', err, null);
|
||||||
|
}
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function workerToPrimaryEcho() {
|
||||||
|
const uids = genUIDS();
|
||||||
|
const payload = { testing: true };
|
||||||
|
const expected = { workerId: cluster.worker.id, payload, uids };
|
||||||
|
|
||||||
|
const results = await sendWorkerCommand('PRIMARY', 'echoHandler', uids, payload);
|
||||||
|
assert.strictEqual(results.length, 1, 'There is 1 and only 1 primary');
|
||||||
|
assert.ifError(results[0].error);
|
||||||
|
assert.deepStrictEqual(results[0].result, expected);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function workerToPrimaryErrorWithHttpCode() {
|
||||||
|
const uids = genUIDS();
|
||||||
|
const payload = { testing: true };
|
||||||
|
const results = await sendWorkerCommand('PRIMARY', 'errorWithHttpCodeHandler', uids, payload);
|
||||||
|
assert.strictEqual(results.length, 1, 'There is 1 and only 1 primary');
|
||||||
|
assert.ok(results[0].error);
|
||||||
|
assert.strictEqual(results[0].error.message, 'An error message from primary');
|
||||||
|
assert.strictEqual(results[0].error.code, 418);
|
||||||
|
}
|
||||||
|
|
||||||
|
const TEST_URLS = {
|
||||||
|
'/successful-command': successfulCommandTest,
|
||||||
|
'/successful-command-with-extra-worker': successfulCommandWithExtraWorkerTest,
|
||||||
|
'/unsupported-to-workers': unsupportedToWorkersTest,
|
||||||
|
'/unsupported-handler': unsupportedHandlerTest,
|
||||||
|
'/missing-uids': missingUidsTest,
|
||||||
|
'/duplicate-uids': duplicateUidsTest,
|
||||||
|
'/unsuccessful-worker': unsuccessfulWorkerTest,
|
||||||
|
'/worker-timeout': workerTimeoutTest,
|
||||||
|
'/worker-to-primary/echo': workerToPrimaryEcho,
|
||||||
|
'/worker-to-primary/error-with-http-code': workerToPrimaryErrorWithHttpCode,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (process.argv.length !== 4) {
|
||||||
|
console.error('ClusterRPC test server: GET requests on test URLs trigger test runs\n\n' +
|
||||||
|
'Usage: node ClusterRPC-test-server.js <port> <nb-workers>\n\n' +
|
||||||
|
'Available test URLs:');
|
||||||
|
console.error(`${Object.keys(TEST_URLS).map(url => `- ${url}\n`).join('')}`);
|
||||||
|
process.exit(2);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* eslint-disable prefer-const */
|
||||||
|
[
|
||||||
|
SERVER_PORT,
|
||||||
|
N_WORKERS,
|
||||||
|
] = process.argv.slice(2, 4).map(value => Number.parseInt(value, 10));
|
||||||
|
/* eslint-enable prefer-const */
|
||||||
|
|
||||||
|
let server;
|
||||||
|
|
||||||
|
if (cluster.isPrimary) {
|
||||||
|
async.timesSeries(
|
||||||
|
N_WORKERS,
|
||||||
|
(i, wcb) => cluster.fork().on('online', wcb),
|
||||||
|
() => {
|
||||||
|
setupRPCPrimary(primaryHandlers);
|
||||||
|
},
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
// in worker
|
||||||
|
server = http.createServer((req, res) => {
|
||||||
|
if (req.url in TEST_URLS) {
|
||||||
|
return TEST_URLS[req.url]().then(() => {
|
||||||
|
if (getPendingCommandsCount() !== 0) {
|
||||||
|
console.error(`There are still ${getPendingCommandsCount()} pending ` +
|
||||||
|
`RPC commands after test ${req.url} completed`);
|
||||||
|
throw errors.InternalError;
|
||||||
|
}
|
||||||
|
res.writeHead(200);
|
||||||
|
res.end();
|
||||||
|
}).catch(err => {
|
||||||
|
// serialize AssertionError to be displayed nicely in jest
|
||||||
|
if (err instanceof assert.AssertionError) {
|
||||||
|
const serializedErr = JSON.stringify({
|
||||||
|
code: err.code,
|
||||||
|
message: err.message,
|
||||||
|
stack: err.stack,
|
||||||
|
actual: err.actual,
|
||||||
|
expected: err.expected,
|
||||||
|
operator: err.operator,
|
||||||
|
});
|
||||||
|
res.writeHead(500);
|
||||||
|
res.end(serializedErr);
|
||||||
|
} else {
|
||||||
|
res.writeHead(err.code || 500);
|
||||||
|
res.end(err.message);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
console.error(`Invalid test URL ${req.url}`);
|
||||||
|
res.writeHead(400);
|
||||||
|
res.end();
|
||||||
|
return undefined;
|
||||||
|
});
|
||||||
|
server.listen(SERVER_PORT);
|
||||||
|
server.on('listening', () => {
|
||||||
|
console.log('Worker is listening');
|
||||||
|
});
|
||||||
|
|
||||||
|
setupRPCWorker(rpcHandlers);
|
||||||
|
}
|
||||||
|
|
||||||
|
function stop(signal) {
|
||||||
|
if (cluster.isPrimary) {
|
||||||
|
console.log(`Handling signal ${signal}`);
|
||||||
|
for (const worker of Object.values(cluster.workers)) {
|
||||||
|
worker.kill(signal);
|
||||||
|
worker.on('exit', () => {
|
||||||
|
console.log(`Worker ${worker.id} exited`);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for (const interval of ipcPolluterIntervals) {
|
||||||
|
clearInterval(interval);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
process.on('SIGTERM', stop);
|
||||||
|
process.on('SIGINT', stop);
|
||||||
|
process.on('SIGPIPE', () => {});
|
||||||
|
|
||||||
|
// for testing: spawn a new worker each time SIGUSR1 is received
|
||||||
|
function spawnNewWorker() {
|
||||||
|
if (cluster.isPrimary) {
|
||||||
|
cluster.fork();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
process.on('SIGUSR1', spawnNewWorker);
|
|
@ -0,0 +1,151 @@
|
||||||
|
'use strict'; // eslint-disable-line
|
||||||
|
const assert = require('assert');
|
||||||
|
const http = require('http');
|
||||||
|
const readline = require('readline');
|
||||||
|
const spawn = require('child_process').spawn;
|
||||||
|
|
||||||
|
const TEST_SERVER_PORT = 8800;
|
||||||
|
const NB_WORKERS = 4;
|
||||||
|
|
||||||
|
let testServer = null;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* jest tests don't correctly support cluster mode with child forked
|
||||||
|
* processes, instead we use an external test server that launches
|
||||||
|
* each test based on the provided URL, and returns either 200 for
|
||||||
|
* success or 500 for failure. A crash would also cause a failure
|
||||||
|
* from the client side.
|
||||||
|
*/
|
||||||
|
function startTestServer(done) {
|
||||||
|
testServer = spawn('node', [
|
||||||
|
`${__dirname}/ClusterRPC-test-server.js`,
|
||||||
|
TEST_SERVER_PORT,
|
||||||
|
NB_WORKERS,
|
||||||
|
]);
|
||||||
|
// gather server stderr to display test failures info
|
||||||
|
testServer.stdout.pipe(process.stdout);
|
||||||
|
testServer.stderr.pipe(process.stderr);
|
||||||
|
|
||||||
|
const rl = readline.createInterface({
|
||||||
|
input: testServer.stdout,
|
||||||
|
});
|
||||||
|
let nbListeningWorkers = 0;
|
||||||
|
rl.on('line', line => {
|
||||||
|
if (line === 'Worker is listening') {
|
||||||
|
nbListeningWorkers++;
|
||||||
|
if (nbListeningWorkers === NB_WORKERS) {
|
||||||
|
rl.close();
|
||||||
|
done();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function stopTestServer(done) {
|
||||||
|
testServer.kill('SIGTERM');
|
||||||
|
testServer.on('close', done);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Try to deserialize and recreate AssertionError with stackTrace from spawned server
|
||||||
|
* @param {string} responseBody maybe serialized AssertionError
|
||||||
|
* @throws {assert.AssertionError}
|
||||||
|
* @returns {undefined}
|
||||||
|
*/
|
||||||
|
function handleAssertionError(responseBody) {
|
||||||
|
let parsed;
|
||||||
|
try {
|
||||||
|
parsed = JSON.parse(responseBody);
|
||||||
|
} catch (_) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (parsed && parsed.code === 'ERR_ASSERTION') {
|
||||||
|
const err = new assert.AssertionError(parsed);
|
||||||
|
err.stack = parsed.stack;
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function runTest(testUrl, cb) {
|
||||||
|
const req = http.request(`http://localhost:${TEST_SERVER_PORT}/${testUrl}`, res => {
|
||||||
|
let responseBody = '';
|
||||||
|
res
|
||||||
|
.on('data', (chunk) => {
|
||||||
|
responseBody += chunk;
|
||||||
|
})
|
||||||
|
.on('end', () => {
|
||||||
|
try {
|
||||||
|
handleAssertionError(responseBody);
|
||||||
|
expect(res.statusCode).toEqual(200);
|
||||||
|
} catch (err) {
|
||||||
|
if (!(err instanceof assert.AssertionError)) {
|
||||||
|
err.message += `\n\nBody:\n${responseBody}`;
|
||||||
|
}
|
||||||
|
return cb(err);
|
||||||
|
}
|
||||||
|
return cb();
|
||||||
|
})
|
||||||
|
.on('error', err => cb(err));
|
||||||
|
});
|
||||||
|
req
|
||||||
|
.end()
|
||||||
|
.on('error', err => cb(err));
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('ClusterRPC', () => {
|
||||||
|
beforeAll(done => startTestServer(done));
|
||||||
|
afterAll(done => stopTestServer(done));
|
||||||
|
|
||||||
|
it('should send a successful command to all workers', done => {
|
||||||
|
runTest('successful-command', done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should error if "toWorkers" field is not "*"', done => {
|
||||||
|
runTest('unsupported-to-workers', done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should error if handler name is not known', done => {
|
||||||
|
runTest('unsupported-handler', done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should error if "uids" field is not passed', done => {
|
||||||
|
runTest('missing-uids', done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should error if two simultaneous commands with same "uids" field are sent', done => {
|
||||||
|
runTest('duplicate-uids', done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should timeout if one or more workers don\'t respond in allocated time', done => {
|
||||||
|
runTest('worker-timeout', done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return worker errors in results array', done => {
|
||||||
|
runTest('unsuccessful-worker', done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should send a successful command to all workers after an extra worker is spawned', done => {
|
||||||
|
const rl = readline.createInterface({
|
||||||
|
input: testServer.stdout,
|
||||||
|
});
|
||||||
|
rl.on('line', line => {
|
||||||
|
if (line === 'Worker is listening') {
|
||||||
|
rl.close();
|
||||||
|
runTest('successful-command-with-extra-worker', done);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
// The test server spawns a new worker when it receives SIGUSR1
|
||||||
|
testServer.kill('SIGUSR1');
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('worker to primary', () => {
|
||||||
|
it('should succeed and return a result', done => {
|
||||||
|
runTest('worker-to-primary/echo', done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return an error with a code', done => {
|
||||||
|
runTest('worker-to-primary/error-with-http-code', done);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
|
@ -44,41 +44,37 @@ describe('MongoClientInterface::metadata.deleteObjectMD', () => {
|
||||||
let collection;
|
let collection;
|
||||||
|
|
||||||
function getObjectCount(cb) {
|
function getObjectCount(cb) {
|
||||||
collection.countDocuments((err, count) => {
|
collection.countDocuments()
|
||||||
if (err) {
|
.then(count => cb(null, count))
|
||||||
cb(err);
|
.catch(err => cb(err));
|
||||||
}
|
|
||||||
cb(null, count);
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function getObject(key, cb) {
|
function getObject(key, cb) {
|
||||||
collection.findOne({
|
collection.findOne({
|
||||||
_id: key,
|
_id: key,
|
||||||
}, {}, (err, doc) => {
|
}, {}).then(doc => {
|
||||||
if (err) {
|
|
||||||
return cb(err);
|
|
||||||
}
|
|
||||||
if (!doc) {
|
if (!doc) {
|
||||||
return cb(errors.NoSuchKey);
|
return cb(errors.NoSuchKey);
|
||||||
}
|
}
|
||||||
return cb(null, doc.value);
|
return cb(null, doc.value);
|
||||||
});
|
}).catch(err => cb(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
beforeAll(done => {
|
beforeAll(done => {
|
||||||
mongoserver.waitUntilRunning().then(() => {
|
mongoserver.start().then(() => {
|
||||||
const opts = {
|
mongoserver.waitUntilRunning().then(() => {
|
||||||
mongodb: {
|
const opts = {
|
||||||
replicaSetHosts: 'localhost:27018',
|
mongodb: {
|
||||||
writeConcern: 'majority',
|
replicaSetHosts: 'localhost:27018',
|
||||||
replicaSet: 'rs0',
|
writeConcern: 'majority',
|
||||||
readPreference: 'primary',
|
replicaSet: 'rs0',
|
||||||
database: DB_NAME,
|
readPreference: 'primary',
|
||||||
},
|
database: DB_NAME,
|
||||||
};
|
},
|
||||||
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
|
};
|
||||||
metadata.setup(done);
|
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
|
||||||
|
metadata.setup(done);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -458,6 +454,48 @@ describe('MongoClientInterface::metadata.deleteObjectMD', () => {
|
||||||
},
|
},
|
||||||
], done);
|
], done);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should delete the object directly if params.doesNotNeedOpogUpdate is true', done => {
|
||||||
|
const objName = 'object-to-delete';
|
||||||
|
const objVal = {
|
||||||
|
key: 'object-to-delete',
|
||||||
|
versionId: 'null',
|
||||||
|
};
|
||||||
|
const versionParams = {
|
||||||
|
versioning: false,
|
||||||
|
versionId: null,
|
||||||
|
repairMaster: null,
|
||||||
|
};
|
||||||
|
async.series([
|
||||||
|
next => {
|
||||||
|
metadata.putObjectMD(BUCKET_NAME, objName, objVal, versionParams, logger, next);
|
||||||
|
},
|
||||||
|
next => {
|
||||||
|
metadata.deleteObjectMD(BUCKET_NAME, objName, { doesNotNeedOpogUpdate: true }, logger, next);
|
||||||
|
},
|
||||||
|
next => {
|
||||||
|
metadata.getObjectMD(BUCKET_NAME, objName, null, logger, err => {
|
||||||
|
assert.deepStrictEqual(err, errors.NoSuchKey);
|
||||||
|
return next();
|
||||||
|
});
|
||||||
|
},
|
||||||
|
next => {
|
||||||
|
getObjectCount((err, count) => {
|
||||||
|
assert.deepStrictEqual(err, null);
|
||||||
|
assert.strictEqual(count, 0);
|
||||||
|
return next();
|
||||||
|
});
|
||||||
|
},
|
||||||
|
], done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should throw an error if params.doesNotNeedOpogUpdate is true and object does not exist', done => {
|
||||||
|
const objName = 'non-existent-object';
|
||||||
|
metadata.deleteObjectMD(BUCKET_NAME, objName, { doesNotNeedOpogUpdate: true }, logger, err => {
|
||||||
|
assert.deepStrictEqual(err, errors.InternalError);
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
|
@ -74,13 +74,7 @@ describe('MongoClientInterface::metadata.getObjectMD', () => {
|
||||||
{
|
{
|
||||||
$set: { _id: mKey, value: objVal },
|
$set: { _id: mKey, value: objVal },
|
||||||
},
|
},
|
||||||
{ upsert: true },
|
{ upsert: true }).then(() => cb(null)).catch(err => cb(err));
|
||||||
err => {
|
|
||||||
if (err) {
|
|
||||||
return cb(err);
|
|
||||||
}
|
|
||||||
return cb(null);
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -93,22 +87,24 @@ describe('MongoClientInterface::metadata.getObjectMD', () => {
|
||||||
collection.updateMany(
|
collection.updateMany(
|
||||||
{ 'value.key': key },
|
{ 'value.key': key },
|
||||||
{ $set: { 'value.deleted': true } },
|
{ $set: { 'value.deleted': true } },
|
||||||
{ upsert: false }, cb);
|
{ upsert: false }).then(() => cb()).catch(err => cb(err));
|
||||||
}
|
}
|
||||||
|
|
||||||
beforeAll(done => {
|
beforeAll(done => {
|
||||||
mongoserver.waitUntilRunning().then(() => {
|
mongoserver.start().then(() => {
|
||||||
const opts = {
|
mongoserver.waitUntilRunning().then(() => {
|
||||||
mongodb: {
|
const opts = {
|
||||||
replicaSetHosts: 'localhost:27019',
|
mongodb: {
|
||||||
writeConcern: 'majority',
|
replicaSetHosts: 'localhost:27019',
|
||||||
replicaSet: 'rs0',
|
writeConcern: 'majority',
|
||||||
readPreference: 'primary',
|
replicaSet: 'rs0',
|
||||||
database: DB_NAME,
|
readPreference: 'primary',
|
||||||
},
|
database: DB_NAME,
|
||||||
};
|
},
|
||||||
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
|
};
|
||||||
metadata.setup(done);
|
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
|
||||||
|
metadata.setup(done);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,331 @@
|
||||||
|
const async = require('async');
|
||||||
|
const assert = require('assert');
|
||||||
|
const werelogs = require('werelogs');
|
||||||
|
const { MongoMemoryReplSet } = require('mongodb-memory-server');
|
||||||
|
const { versioning } = require('../../../../index');
|
||||||
|
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
|
||||||
|
const BucketInfo = require('../../../../lib/models/BucketInfo').default;
|
||||||
|
const MetadataWrapper =
|
||||||
|
require('../../../../lib/storage/metadata/MetadataWrapper');
|
||||||
|
const genVID = versioning.VersionID.generateVersionId;
|
||||||
|
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
|
||||||
|
const { formatMasterKey, formatVersionKey } = require('../../../../lib/storage/metadata/mongoclient/utils');
|
||||||
|
|
||||||
|
const IMPL_NAME = 'mongodb';
|
||||||
|
const DB_NAME = 'metadata';
|
||||||
|
const BUCKET_NAME = 'test-bucket-batching';
|
||||||
|
const replicationGroupId = 'RG001';
|
||||||
|
const N = 10;
|
||||||
|
|
||||||
|
const mongoserver = new MongoMemoryReplSet({
|
||||||
|
debug: false,
|
||||||
|
instanceOpts: [
|
||||||
|
{ port: 27019 },
|
||||||
|
],
|
||||||
|
replSet: {
|
||||||
|
name: 'rs0',
|
||||||
|
count: 1,
|
||||||
|
DB_NAME,
|
||||||
|
storageEngine: 'ephemeralForTest',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
let uidCounter = 0;
|
||||||
|
function generateVersionId() {
|
||||||
|
return genVID(`${process.pid}.${uidCounter++}`,
|
||||||
|
replicationGroupId);
|
||||||
|
}
|
||||||
|
|
||||||
|
const variations = [
|
||||||
|
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0, versioning: false },
|
||||||
|
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0, versioning: true },
|
||||||
|
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1, versioning: false },
|
||||||
|
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1, versioning: true },
|
||||||
|
];
|
||||||
|
|
||||||
|
describe('MongoClientInterface::metadata.getObjectsMD', () => {
|
||||||
|
let metadata;
|
||||||
|
let collection;
|
||||||
|
let versionId2;
|
||||||
|
|
||||||
|
const params = {
|
||||||
|
key: 'pfx1-test-object',
|
||||||
|
objVal: {
|
||||||
|
key: 'pfx1-test-object',
|
||||||
|
versionId: 'null',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
function updateMasterObject(objName, versionId, objVal, vFormat, cb) {
|
||||||
|
const mKey = formatMasterKey(objName, vFormat);
|
||||||
|
collection.updateOne(
|
||||||
|
{
|
||||||
|
_id: mKey,
|
||||||
|
$or: [{
|
||||||
|
'value.versionId': {
|
||||||
|
$exists: false,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'value.versionId': {
|
||||||
|
$gt: versionId,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
$set: { _id: mKey, value: objVal },
|
||||||
|
},
|
||||||
|
{ upsert: true }).then(() => cb(null)).catch(err => cb(err));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sets the "deleted" property to true
|
||||||
|
* @param {string} key object name
|
||||||
|
* @param {Function} cb callback
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
function flagObjectForDeletion(key, cb) {
|
||||||
|
collection.updateMany(
|
||||||
|
{ 'value.key': key },
|
||||||
|
{ $set: { 'value.deleted': true } },
|
||||||
|
{ upsert: false }).then(() => cb()).catch(err => cb(err));
|
||||||
|
}
|
||||||
|
|
||||||
|
beforeAll(done => {
|
||||||
|
mongoserver.start().then(() => {
|
||||||
|
mongoserver.waitUntilRunning().then(() => {
|
||||||
|
const opts = {
|
||||||
|
mongodb: {
|
||||||
|
replicaSetHosts: 'localhost:27019',
|
||||||
|
writeConcern: 'majority',
|
||||||
|
replicaSet: 'rs0',
|
||||||
|
readPreference: 'primary',
|
||||||
|
database: DB_NAME,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
|
||||||
|
metadata.setup(done);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
afterAll(done => {
|
||||||
|
async.series([
|
||||||
|
next => metadata.close(next),
|
||||||
|
next => mongoserver.stop()
|
||||||
|
.then(() => next())
|
||||||
|
.catch(next),
|
||||||
|
], done);
|
||||||
|
});
|
||||||
|
|
||||||
|
variations.forEach(variation => {
|
||||||
|
const itOnlyInV1 = variation.vFormat === 'v1' && variation.versioning ? it : it.skip;
|
||||||
|
describe(`vFormat : ${variation.vFormat}, versioning: ${variation.versioning}`, () => {
|
||||||
|
let paramsArr = [];
|
||||||
|
|
||||||
|
beforeEach(done => {
|
||||||
|
// reset params
|
||||||
|
paramsArr = Array.from({ length: N }, (_, i) => ({
|
||||||
|
key: `pfx1-test-object${i + 1}`,
|
||||||
|
objVal: {
|
||||||
|
key: `pfx1-test-object${i + 1}`,
|
||||||
|
versionId: 'null',
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
const bucketMD = BucketInfo.fromObj({
|
||||||
|
_name: BUCKET_NAME,
|
||||||
|
_owner: 'testowner',
|
||||||
|
_ownerDisplayName: 'testdisplayname',
|
||||||
|
_creationDate: new Date().toJSON(),
|
||||||
|
_acl: {
|
||||||
|
Canned: 'private',
|
||||||
|
FULL_CONTROL: [],
|
||||||
|
WRITE: [],
|
||||||
|
WRITE_ACP: [],
|
||||||
|
READ: [],
|
||||||
|
READ_ACP: [],
|
||||||
|
},
|
||||||
|
_mdBucketModelVersion: 10,
|
||||||
|
_transient: false,
|
||||||
|
_deleted: false,
|
||||||
|
_serverSideEncryption: null,
|
||||||
|
_versioningConfiguration: null,
|
||||||
|
_locationConstraint: 'us-east-1',
|
||||||
|
_readLocationConstraint: null,
|
||||||
|
_cors: null,
|
||||||
|
_replicationConfiguration: null,
|
||||||
|
_lifecycleConfiguration: null,
|
||||||
|
_uid: '',
|
||||||
|
_isNFS: null,
|
||||||
|
ingestion: null,
|
||||||
|
});
|
||||||
|
const versionParams = {
|
||||||
|
versioning: variation.versioning,
|
||||||
|
versionId: null,
|
||||||
|
repairMaster: null,
|
||||||
|
};
|
||||||
|
async.series([
|
||||||
|
next => {
|
||||||
|
metadata.client.defaultBucketKeyFormat = variation.vFormat;
|
||||||
|
return next();
|
||||||
|
},
|
||||||
|
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
|
||||||
|
if (err) {
|
||||||
|
return next(err);
|
||||||
|
}
|
||||||
|
collection = metadata.client.getCollection(BUCKET_NAME);
|
||||||
|
return next();
|
||||||
|
}),
|
||||||
|
next => {
|
||||||
|
async.eachSeries(paramsArr, (params, eachCb) => {
|
||||||
|
metadata.putObjectMD(BUCKET_NAME, params.key, params.objVal,
|
||||||
|
versionParams, logger, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return eachCb(err);
|
||||||
|
}
|
||||||
|
if (variation.versioning) {
|
||||||
|
// eslint-disable-next-line no-param-reassign
|
||||||
|
params.versionId = JSON.parse(res).versionId;
|
||||||
|
}
|
||||||
|
return eachCb(null);
|
||||||
|
});
|
||||||
|
}, next);
|
||||||
|
},
|
||||||
|
next => {
|
||||||
|
metadata.putObjectMD(BUCKET_NAME, paramsArr[N - 1].key, paramsArr[N - 1].objVal,
|
||||||
|
versionParams, logger, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return next(err);
|
||||||
|
}
|
||||||
|
if (variation.versioning) {
|
||||||
|
versionId2 = JSON.parse(res).versionId;
|
||||||
|
} else {
|
||||||
|
versionId2 = 'null';
|
||||||
|
}
|
||||||
|
return next(null);
|
||||||
|
});
|
||||||
|
},
|
||||||
|
], done);
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(done => {
|
||||||
|
metadata.deleteBucket(BUCKET_NAME, logger, done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it(`should get ${N} objects${variation.versioning ? '' : ' master'} versions using batching`, done => {
|
||||||
|
const request = paramsArr.map(({ key, objVal }) => ({
|
||||||
|
key,
|
||||||
|
params: {
|
||||||
|
versionId: variation.versioning ? objVal.versionId : null,
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
metadata.getObjectsMD(BUCKET_NAME, request, logger, (err, objects) => {
|
||||||
|
assert.strictEqual(err, null);
|
||||||
|
assert.strictEqual(objects.length, N);
|
||||||
|
objects.forEach((obj, i) => {
|
||||||
|
assert.strictEqual(obj.doc.key, paramsArr[i].key);
|
||||||
|
if (variation.versioning) {
|
||||||
|
assert.strictEqual(obj.doc.versionId, paramsArr[i].objVal.versionId);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not throw an error if object or version is inexistent and return null doc', done => {
|
||||||
|
const request = [{
|
||||||
|
key: 'nonexistent',
|
||||||
|
params: {
|
||||||
|
versionId: variation.versioning ? 'nonexistent' : null,
|
||||||
|
},
|
||||||
|
}];
|
||||||
|
metadata.getObjectsMD(BUCKET_NAME, request, logger, (err, objects) => {
|
||||||
|
assert.strictEqual(err, null);
|
||||||
|
assert.strictEqual(objects.length, 1);
|
||||||
|
assert.strictEqual(objects[0].doc, null);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it(`should return latest version when master is PHD ${variation.it}`, done => {
|
||||||
|
if (!variation.versioning) {
|
||||||
|
return done();
|
||||||
|
}
|
||||||
|
const request = paramsArr.map(({ key, objVal }) => ({
|
||||||
|
key,
|
||||||
|
params: {
|
||||||
|
versionId: variation.versioning ? objVal.versionId : null,
|
||||||
|
},
|
||||||
|
}));
|
||||||
|
return async.series([
|
||||||
|
next => {
|
||||||
|
let objectName = null;
|
||||||
|
if (variations.versioning) {
|
||||||
|
objectName =
|
||||||
|
formatVersionKey(paramsArr[N - 1].key, paramsArr[N - 1].versionId, variation.vFormat);
|
||||||
|
} else {
|
||||||
|
objectName = formatMasterKey(paramsArr[N - 1].key, variation.vFormat);
|
||||||
|
}
|
||||||
|
// adding isPHD flag to master
|
||||||
|
const phdVersionId = generateVersionId();
|
||||||
|
paramsArr[N - 1].objVal.versionId = phdVersionId;
|
||||||
|
paramsArr[N - 1].objVal.isPHD = true;
|
||||||
|
updateMasterObject(objectName, phdVersionId, paramsArr[N - 1].objVal,
|
||||||
|
variation.vFormat, next);
|
||||||
|
},
|
||||||
|
// Should return latest object version
|
||||||
|
next => metadata.getObjectsMD(BUCKET_NAME, request, logger, (err, objects) => {
|
||||||
|
assert.deepStrictEqual(err, null);
|
||||||
|
objects.forEach((obj, i) => {
|
||||||
|
assert.strictEqual(obj.doc.key, paramsArr[i].objVal.key);
|
||||||
|
if (variation.versioning && i === N - 1) {
|
||||||
|
assert.strictEqual(obj.doc.versionId, versionId2);
|
||||||
|
} else {
|
||||||
|
assert.strictEqual(obj.doc.versionId, paramsArr[i].objVal.versionId);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
delete params.isPHD;
|
||||||
|
return next();
|
||||||
|
}),
|
||||||
|
], done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should fail to get an object tagged for deletion', done => {
|
||||||
|
const key = paramsArr[0].key;
|
||||||
|
flagObjectForDeletion(key, err => {
|
||||||
|
assert(err);
|
||||||
|
metadata.getObjectsMD(BUCKET_NAME, [{ key }], logger, (err, object) => {
|
||||||
|
assert.strictEqual(err, null);
|
||||||
|
assert.strictEqual(object[0].doc, null);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
itOnlyInV1(`Should return last version when master deleted ${variation.vFormat}`, done => {
|
||||||
|
const versioningParams = {
|
||||||
|
versioning: true,
|
||||||
|
versionId: null,
|
||||||
|
repairMaster: null,
|
||||||
|
};
|
||||||
|
async.series([
|
||||||
|
// putting a delete marker as last version
|
||||||
|
next => {
|
||||||
|
paramsArr[0].versionId = null;
|
||||||
|
paramsArr[0].objVal.isDeleteMarker = true;
|
||||||
|
return metadata.putObjectMD(BUCKET_NAME, paramsArr[0].key, paramsArr[0].objVal,
|
||||||
|
versioningParams, logger, next);
|
||||||
|
},
|
||||||
|
next => metadata.getObjectsMD(BUCKET_NAME, [{ key: paramsArr[0].key }], logger, (err, objects) => {
|
||||||
|
assert.strictEqual(err, null);
|
||||||
|
assert.strictEqual(objects[0].doc.key, paramsArr[0].key);
|
||||||
|
assert.strictEqual(objects[0].doc.isDeleteMarker, true);
|
||||||
|
paramsArr[0].objVal.isDeleteMarker = null;
|
||||||
|
return next();
|
||||||
|
}),
|
||||||
|
], done);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
|
@ -0,0 +1,744 @@
|
||||||
|
const async = require('async');
|
||||||
|
const assert = require('assert');
|
||||||
|
const werelogs = require('werelogs');
|
||||||
|
const { MongoMemoryReplSet } = require('mongodb-memory-server');
|
||||||
|
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
|
||||||
|
const MetadataWrapper =
|
||||||
|
require('../../../../../lib/storage/metadata/MetadataWrapper');
|
||||||
|
const { versioning } = require('../../../../../index');
|
||||||
|
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
|
||||||
|
const { assertContents, flagObjectForDeletion, makeBucketMD, putBulkObjectVersions } = require('./utils');
|
||||||
|
|
||||||
|
const IMPL_NAME = 'mongodb';
|
||||||
|
const DB_NAME = 'metadata';
|
||||||
|
const BUCKET_NAME = 'test-lifecycle-list-current-bucket';
|
||||||
|
|
||||||
|
const mongoserver = new MongoMemoryReplSet({
|
||||||
|
debug: false,
|
||||||
|
instanceOpts: [
|
||||||
|
{ port: 27020 },
|
||||||
|
],
|
||||||
|
replSet: {
|
||||||
|
name: 'rs0',
|
||||||
|
count: 1,
|
||||||
|
DB_NAME,
|
||||||
|
storageEngine: 'ephemeralForTest',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
describe('MongoClientInterface::metadata.listLifecycleObject::current', () => {
|
||||||
|
let metadata;
|
||||||
|
let collection;
|
||||||
|
const expectedVersionIds = {};
|
||||||
|
const location1 = 'loc1';
|
||||||
|
const location2 = 'loc2';
|
||||||
|
|
||||||
|
beforeAll(done => {
|
||||||
|
mongoserver.start().then(() => {
|
||||||
|
mongoserver.waitUntilRunning().then(() => {
|
||||||
|
const opts = {
|
||||||
|
mongodb: {
|
||||||
|
replicaSetHosts: 'localhost:27020',
|
||||||
|
writeConcern: 'majority',
|
||||||
|
replicaSet: 'rs0',
|
||||||
|
readPreference: 'primary',
|
||||||
|
database: DB_NAME,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
|
||||||
|
metadata.setup(done);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
afterAll(done => {
|
||||||
|
async.series([
|
||||||
|
next => metadata.close(next),
|
||||||
|
next => mongoserver.stop()
|
||||||
|
.then(() => next())
|
||||||
|
.catch(next),
|
||||||
|
], done);
|
||||||
|
});
|
||||||
|
|
||||||
|
[BucketVersioningKeyFormat.v0, BucketVersioningKeyFormat.v1].forEach(v => {
|
||||||
|
describe(`bucket format version: ${v}`, () => {
|
||||||
|
beforeEach(done => {
|
||||||
|
const bucketMD = makeBucketMD(BUCKET_NAME);
|
||||||
|
const versionParams = {
|
||||||
|
versioning: true,
|
||||||
|
versionId: null,
|
||||||
|
repairMaster: null,
|
||||||
|
};
|
||||||
|
metadata.client.defaultBucketKeyFormat = v;
|
||||||
|
async.series([
|
||||||
|
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
|
||||||
|
if (err) {
|
||||||
|
return next(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
collection = metadata.client.getCollection(BUCKET_NAME);
|
||||||
|
return next();
|
||||||
|
}),
|
||||||
|
next => {
|
||||||
|
const objName = 'pfx1-test-object';
|
||||||
|
const objVal = {
|
||||||
|
key: 'pfx1-test-object',
|
||||||
|
versionId: 'null',
|
||||||
|
dataStoreName: location1,
|
||||||
|
};
|
||||||
|
const nbVersions = 5;
|
||||||
|
|
||||||
|
const timestamp = 0;
|
||||||
|
putBulkObjectVersions(metadata, BUCKET_NAME, objName, objVal, versionParams,
|
||||||
|
nbVersions, timestamp, logger, (err, data) => {
|
||||||
|
expectedVersionIds[objName] = data.lastVersionId;
|
||||||
|
return next(err);
|
||||||
|
});
|
||||||
|
/* eslint-disable max-len */
|
||||||
|
// The following versions are created:
|
||||||
|
// { "_id" : "Mpfx1-test-object", "value" : { "key" : "pfx1-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:00.005Z" } }
|
||||||
|
// { "_id" : "Vpfx1-test-object{sep}id4", "value" : { "key" : "pfx1-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:00.005Z" } }
|
||||||
|
// { "_id" : "Vpfx1-test-object{sep}id3", "value" : { "key" : "pfx1-test-object", "versionId" : "vid3", "last-modified" : "1970-01-01T00:00:00.004Z" } }
|
||||||
|
// { "_id" : "Vpfx1-test-object{sep}id2", "value" : { "key" : "pfx1-test-object", "versionId" : "vid2", "last-modified" : "1970-01-01T00:00:00.003Z" } }
|
||||||
|
// { "_id" : "Vpfx1-test-object{sep}id1", "value" : { "key" : "pfx1-test-object", "versionId" : "vid1", "last-modified" : "1970-01-01T00:00:00.002Z" } }
|
||||||
|
// { "_id" : "Vpfx1-test-object{sep}id0", "value" : { "key" : "pfx1-test-object", "versionId" : "vid0", "last-modified" : "1970-01-01T00:00:00.001Z" } }
|
||||||
|
/* eslint-enable max-len */
|
||||||
|
},
|
||||||
|
next => {
|
||||||
|
const objName = 'pfx2-test-object';
|
||||||
|
const objVal = {
|
||||||
|
key: 'pfx2-test-object',
|
||||||
|
versionId: 'null',
|
||||||
|
dataStoreName: location2,
|
||||||
|
};
|
||||||
|
const nbVersions = 5;
|
||||||
|
const timestamp = 2000;
|
||||||
|
putBulkObjectVersions(metadata, BUCKET_NAME, objName, objVal, versionParams,
|
||||||
|
nbVersions, timestamp, logger, (err, data) => {
|
||||||
|
expectedVersionIds[objName] = data.lastVersionId;
|
||||||
|
return next(err);
|
||||||
|
});
|
||||||
|
/* eslint-disable max-len */
|
||||||
|
// The following versions are created:
|
||||||
|
// { "_id" : "Mpfx2-test-object", "value" : { "key" : "pfx2-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:02.005Z" } }
|
||||||
|
// { "_id" : "Vpfx2-test-object{sep}id4", "value" : { "key" : "pfx2-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:02.005Z" } }
|
||||||
|
// { "_id" : "Vpfx2-test-object{sep}id3", "value" : { "key" : "pfx2-test-object", "versionId" : "vid3", "last-modified" : "1970-01-01T00:00:02.004Z" } }
|
||||||
|
// { "_id" : "Vpfx2-test-object{sep}id2", "value" : { "key" : "pfx2-test-object", "versionId" : "vid2", "last-modified" : "1970-01-01T00:00:02.003Z" } }
|
||||||
|
// { "_id" : "Vpfx2-test-object{sep}id1", "value" : { "key" : "pfx2-test-object", "versionId" : "vid1", "last-modified" : "1970-01-01T00:00:02.002Z" } }
|
||||||
|
// { "_id" : "Vpfx1-test-object{sep}id0", "value" : { "key" : "pfx2-test-object", "versionId" : "vid0", "last-modified" : "1970-01-01T00:00:02.001Z" } }
|
||||||
|
/* eslint-enable max-len */
|
||||||
|
},
|
||||||
|
next => {
|
||||||
|
const objName = 'pfx3-test-object';
|
||||||
|
const objVal = {
|
||||||
|
key: 'pfx3-test-object',
|
||||||
|
versionId: 'null',
|
||||||
|
dataStoreName: location1,
|
||||||
|
};
|
||||||
|
const nbVersions = 5;
|
||||||
|
const timestamp = 1000;
|
||||||
|
putBulkObjectVersions(metadata, BUCKET_NAME, objName, objVal, versionParams,
|
||||||
|
nbVersions, timestamp, logger, (err, data) => {
|
||||||
|
expectedVersionIds[objName] = data.lastVersionId;
|
||||||
|
return next(err);
|
||||||
|
});
|
||||||
|
/* eslint-disable max-len */
|
||||||
|
// The following versions are created:
|
||||||
|
// { "_id" : "Mpfx3-test-object", "value" : { "key" : "pfx3-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:01.005Z" } }
|
||||||
|
// { "_id" : "Vpfx3-test-object{sep}id4", "value" : { "key" : "pfx3-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:01.005Z" } }
|
||||||
|
// { "_id" : "Vpfx3-test-object{sep}id3", "value" : { "key" : "pfx3-test-object", "versionId" : "vid3", "last-modified" : "1970-01-01T00:00:01.004Z" } }
|
||||||
|
// { "_id" : "Vpfx3-test-object{sep}id2", "value" : { "key" : "pfx3-test-object", "versionId" : "vid2", "last-modified" : "1970-01-01T00:00:01.003Z" } }
|
||||||
|
// { "_id" : "Vpfx3-test-object{sep}id1", "value" : { "key" : "pfx3-test-object", "versionId" : "vid1", "last-modified" : "1970-01-01T00:00:01.002Z" } }
|
||||||
|
// { "_id" : "Vpfx3-test-object{sep}id0", "value" : { "key" : "pfx3-test-object", "versionId" : "vid0", "last-modified" : "1970-01-01T00:00:01.001Z" } }
|
||||||
|
/* eslint-enable max-len */
|
||||||
|
},
|
||||||
|
], done);
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(done => {
|
||||||
|
metadata.deleteBucket(BUCKET_NAME, logger, done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should list current versions of objects', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterCurrent',
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert.strictEqual(data.Contents.length, 3);
|
||||||
|
const expected = [
|
||||||
|
{
|
||||||
|
key: 'pfx1-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:00.005Z',
|
||||||
|
dataStoreName: location1,
|
||||||
|
VersionId: expectedVersionIds['pfx1-test-object'],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'pfx2-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:02.005Z',
|
||||||
|
dataStoreName: location2,
|
||||||
|
VersionId: expectedVersionIds['pfx2-test-object'],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'pfx3-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:01.005Z',
|
||||||
|
dataStoreName: location1,
|
||||||
|
VersionId: expectedVersionIds['pfx3-test-object'],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
assertContents(data.Contents, expected);
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should list current versions of objects excluding keys stored in location2', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterCurrent',
|
||||||
|
excludedDataStoreName: location2,
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert.strictEqual(data.Contents.length, 2);
|
||||||
|
const expected = [
|
||||||
|
{
|
||||||
|
key: 'pfx1-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:00.005Z',
|
||||||
|
dataStoreName: location1,
|
||||||
|
VersionId: expectedVersionIds['pfx1-test-object'],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'pfx3-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:01.005Z',
|
||||||
|
dataStoreName: location1,
|
||||||
|
VersionId: expectedVersionIds['pfx3-test-object'],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
assertContents(data.Contents, expected);
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should list current versions of objects excluding keys stored in location1', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterCurrent',
|
||||||
|
excludedDataStoreName: location1,
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert.strictEqual(data.Contents.length, 1);
|
||||||
|
const expected = [
|
||||||
|
{
|
||||||
|
key: 'pfx2-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:02.005Z',
|
||||||
|
dataStoreName: location2,
|
||||||
|
VersionId: expectedVersionIds['pfx2-test-object'],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
assertContents(data.Contents, expected);
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should list current versions of objects with prefix and excluding keys stored in location2', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterCurrent',
|
||||||
|
excludedDataStoreName: location2,
|
||||||
|
prefix: 'pfx3',
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert.strictEqual(data.Contents.length, 1);
|
||||||
|
const expected = [
|
||||||
|
{
|
||||||
|
key: 'pfx3-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:01.005Z',
|
||||||
|
dataStoreName: location1,
|
||||||
|
VersionId: expectedVersionIds['pfx3-test-object'],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
assertContents(data.Contents, expected);
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should return trucated list of current versions excluding keys stored in location2', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterCurrent',
|
||||||
|
excludedDataStoreName: location2,
|
||||||
|
maxKeys: 1,
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.strictEqual(data.IsTruncated, true);
|
||||||
|
assert.strictEqual(data.Contents.length, 1);
|
||||||
|
assert.strictEqual(data.NextMarker, 'pfx1-test-object');
|
||||||
|
const expected = [
|
||||||
|
{
|
||||||
|
key: 'pfx1-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:00.005Z',
|
||||||
|
dataStoreName: location1,
|
||||||
|
VersionId: expectedVersionIds['pfx1-test-object'],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
assertContents(data.Contents, expected);
|
||||||
|
|
||||||
|
params.marker = 'pfx1-test-object';
|
||||||
|
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert.strictEqual(data.Contents.length, 1);
|
||||||
|
const expected = [
|
||||||
|
{
|
||||||
|
key: 'pfx3-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:01.005Z',
|
||||||
|
dataStoreName: location1,
|
||||||
|
VersionId: expectedVersionIds['pfx3-test-object'],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
assertContents(data.Contents, expected);
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should return empty list when beforeDate is before the objects creation date', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterCurrent',
|
||||||
|
beforeDate: '1970-01-01T00:00:00.000Z',
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert.strictEqual(data.Contents.length, 0);
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should return the current version modified before 1970-01-01T00:00:00.010Z', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterCurrent',
|
||||||
|
beforeDate: '1970-01-01T00:00:00.10Z',
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert.strictEqual(data.Contents.length, 1);
|
||||||
|
const expected = [
|
||||||
|
{
|
||||||
|
key: 'pfx1-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:00.005Z',
|
||||||
|
dataStoreName: location1,
|
||||||
|
VersionId: expectedVersionIds['pfx1-test-object'],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
assertContents(data.Contents, expected);
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should return the current versions modified before 1970-01-01T00:00:01.010Z', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterCurrent',
|
||||||
|
beforeDate: '1970-01-01T00:00:01.010Z',
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert.strictEqual(data.Contents.length, 2);
|
||||||
|
const expected = [
|
||||||
|
{
|
||||||
|
key: 'pfx1-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:00.005Z',
|
||||||
|
dataStoreName: location1,
|
||||||
|
VersionId: expectedVersionIds['pfx1-test-object'],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'pfx3-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:01.005Z',
|
||||||
|
dataStoreName: location1,
|
||||||
|
VersionId: expectedVersionIds['pfx3-test-object'],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
assertContents(data.Contents, expected);
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should return the current versions modified before 1970-01-01T00:00:02.010Z', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterCurrent',
|
||||||
|
beforeDate: '1970-01-01T00:00:02.010Z',
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert.strictEqual(data.Contents.length, 3);
|
||||||
|
const expected = [
|
||||||
|
{
|
||||||
|
key: 'pfx1-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:00.005Z',
|
||||||
|
dataStoreName: location1,
|
||||||
|
VersionId: expectedVersionIds['pfx1-test-object'],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'pfx2-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:02.005Z',
|
||||||
|
dataStoreName: location2,
|
||||||
|
VersionId: expectedVersionIds['pfx2-test-object'],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'pfx3-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:01.005Z',
|
||||||
|
dataStoreName: location1,
|
||||||
|
VersionId: expectedVersionIds['pfx3-test-object'],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
assertContents(data.Contents, expected);
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should truncate the list of current versions modified before 1970-01-01T00:00:01.010Z', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterCurrent',
|
||||||
|
beforeDate: '1970-01-01T00:00:01.010Z',
|
||||||
|
maxKeys: 1,
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.strictEqual(data.IsTruncated, true);
|
||||||
|
assert.strictEqual(data.Contents.length, 1);
|
||||||
|
assert.strictEqual(data.NextMarker, 'pfx1-test-object');
|
||||||
|
const expected = [
|
||||||
|
{
|
||||||
|
key: 'pfx1-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:00.005Z',
|
||||||
|
dataStoreName: location1,
|
||||||
|
VersionId: expectedVersionIds['pfx1-test-object'],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
assertContents(data.Contents, expected);
|
||||||
|
|
||||||
|
params.marker = 'pfx1-test-object';
|
||||||
|
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert.strictEqual(data.Contents.length, 1);
|
||||||
|
const expected = [
|
||||||
|
{
|
||||||
|
key: 'pfx3-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:01.005Z',
|
||||||
|
dataStoreName: location1,
|
||||||
|
VersionId: expectedVersionIds['pfx3-test-object'],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
assertContents(data.Contents, expected);
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should truncate list of current versions of objects', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterCurrent',
|
||||||
|
maxKeys: 2,
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.strictEqual(data.IsTruncated, true);
|
||||||
|
assert.strictEqual(data.NextMarker, 'pfx2-test-object');
|
||||||
|
assert.strictEqual(data.Contents.length, 2);
|
||||||
|
const expected = [
|
||||||
|
{
|
||||||
|
key: 'pfx1-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:00.005Z',
|
||||||
|
dataStoreName: location1,
|
||||||
|
VersionId: expectedVersionIds['pfx1-test-object'],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: 'pfx2-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:02.005Z',
|
||||||
|
dataStoreName: location2,
|
||||||
|
VersionId: expectedVersionIds['pfx2-test-object'],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
assertContents(data.Contents, expected);
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should list the following current versions of objects', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterCurrent',
|
||||||
|
marker: 'pfx2-test-object',
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert.strictEqual(data.Contents.length, 1);
|
||||||
|
const expected = [
|
||||||
|
{
|
||||||
|
key: 'pfx3-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:01.005Z',
|
||||||
|
dataStoreName: location1,
|
||||||
|
VersionId: expectedVersionIds['pfx3-test-object'],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
assertContents(data.Contents, expected);
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should list current versions that start with prefix', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterCurrent',
|
||||||
|
prefix: 'pfx2',
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert.strictEqual(data.Contents.length, 1);
|
||||||
|
const expected = [
|
||||||
|
{
|
||||||
|
key: 'pfx2-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:02.005Z',
|
||||||
|
dataStoreName: location2,
|
||||||
|
VersionId: expectedVersionIds['pfx2-test-object'],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
assertContents(data.Contents, expected);
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should return the list of current versions modified before 1970-01-01T00:00:01.010Z with prefix pfx1',
|
||||||
|
done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterCurrent',
|
||||||
|
beforeDate: '1970-01-01T00:00:01.010Z',
|
||||||
|
maxKeys: 1,
|
||||||
|
prefix: 'pfx1',
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert.strictEqual(data.Contents.length, 1);
|
||||||
|
const expected = [
|
||||||
|
{
|
||||||
|
key: 'pfx1-test-object',
|
||||||
|
LastModified: '1970-01-01T00:00:00.005Z',
|
||||||
|
dataStoreName: location1,
|
||||||
|
VersionId: expectedVersionIds['pfx1-test-object'],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
assertContents(data.Contents, expected);
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
            it('Should not list deleted version', done => {
                const objVal = {
                    'key': 'pfx4-test-object',
                    'last-modified': new Date(0).toISOString(),
                };
                const versionParams = {
                    versioning: true,
                };
                const params = {
                    listingType: 'DelimiterCurrent',
                };
                let deletedVersionId;

                async.series([
                    // put a new versioned object and keep its version id
                    next => metadata.putObjectMD(BUCKET_NAME, objVal.key, objVal, versionParams,
                        logger, (err, res) => {
                            if (err) {
                                return next(err);
                            }
                            deletedVersionId = JSON.parse(res).versionId;
                            return next(null);
                        }),
                    // delete that version: it must not show up as a current version
                    next => metadata.deleteObjectMD(BUCKET_NAME, objVal.key,
                        { versionId: deletedVersionId }, logger, next),
                    next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
                        assert.ifError(err);
                        assert.strictEqual(data.Contents.length, 3);
                        const expected = [
                            {
                                key: 'pfx1-test-object',
                                LastModified: '1970-01-01T00:00:00.005Z',
                                dataStoreName: location1,
                                VersionId: expectedVersionIds['pfx1-test-object'],
                            },
                            {
                                key: 'pfx2-test-object',
                                LastModified: '1970-01-01T00:00:02.005Z',
                                dataStoreName: location2,
                                VersionId: expectedVersionIds['pfx2-test-object'],
                            },
                            {
                                key: 'pfx3-test-object',
                                LastModified: '1970-01-01T00:00:01.005Z',
                                dataStoreName: location1,
                                VersionId: expectedVersionIds['pfx3-test-object'],
                            },
                        ];
                        assertContents(data.Contents, expected);
                        return next();
                    }),
                ], done);
            });

            it('Should not list object with delete marker', done => {
                const objVal = {
                    'key': 'pfx4-test-object',
                    'last-modified': new Date(0).toISOString(),
                };

                const dmObjVal = { ...objVal, isDeleteMarker: true };
                const versionParams = {
                    versioning: true,
                };
                const params = {
                    listingType: 'DelimiterCurrent',
                };

                async.series([
                    // put a versioned object, then put a delete marker on top of it:
                    // the key must be excluded from the current-version listing
                    next => metadata.putObjectMD(BUCKET_NAME, objVal.key, objVal, versionParams, logger, next),
                    next => metadata.putObjectMD(BUCKET_NAME, objVal.key, dmObjVal, versionParams, logger, next),
                    next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
                        assert.ifError(err);
                        assert.strictEqual(data.Contents.length, 3);
                        const expected = [
                            {
                                key: 'pfx1-test-object',
                                LastModified: '1970-01-01T00:00:00.005Z',
                                dataStoreName: location1,
                                VersionId: expectedVersionIds['pfx1-test-object'],
                            },
                            {
                                key: 'pfx2-test-object',
                                LastModified: '1970-01-01T00:00:02.005Z',
                                dataStoreName: location2,
                                VersionId: expectedVersionIds['pfx2-test-object'],
                            },
                            {
                                key: 'pfx3-test-object',
                                LastModified: '1970-01-01T00:00:01.005Z',
                                dataStoreName: location1,
                                VersionId: expectedVersionIds['pfx3-test-object'],
                            },
                        ];
                        assertContents(data.Contents, expected);
                        return next();
                    }),
                ], done);
            });

            it('Should not list phd master key when listing current versions', done => {
                const objVal = {
                    'key': 'pfx4-test-object',
                    'versionId': 'null',
                    'last-modified': new Date(0).toISOString(),
                };
                const versionParams = {
                    versioning: true,
                };
                const params = {
                    listingType: 'DelimiterCurrent',
                    prefix: 'pfx4',
                };
                let versionId;
                let lastVersionId;
                async.series([
                    // put two versions of the same key, keeping both version ids
                    next => metadata.putObjectMD(BUCKET_NAME, 'pfx4-test-object', objVal, versionParams,
                        logger, (err, res) => {
                            if (err) {
                                return next(err);
                            }
                            versionId = JSON.parse(res).versionId;
                            return next(null);
                        }),
                    next => metadata.putObjectMD(BUCKET_NAME, 'pfx4-test-object', objVal, versionParams,
                        logger, (err, res) => {
                            if (err) {
                                return next(err);
                            }
                            lastVersionId = JSON.parse(res).versionId;
                            return next(null);
                        }),
                    // delete the latest version, which leaves a PHD master key behind
                    next => metadata.deleteObjectMD(BUCKET_NAME, 'pfx4-test-object', { versionId: lastVersionId },
                        logger, next),
                    // the listing must return the remaining version, not the PHD master key
                    next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
                        assert.ifError(err);
                        assert.strictEqual(data.Contents[0].value.VersionId, versionId);
                        return next();
                    }),
                ], done);
            });

            it('Should not list the current version tagged for deletion', done => {
                const objVal = {
                    'key': 'pfx4-test-object',
                    'last-modified': new Date(0).toISOString(),
                };
                const versionParams = {
                    versioning: true,
                };
                const params = {
                    listingType: 'DelimiterCurrent',
                };
                async.series([
                    next => metadata.putObjectMD(BUCKET_NAME, objVal.key, objVal, versionParams,
                        logger, next),
                    // flag the object for deletion: it must be excluded from the listing
                    next => flagObjectForDeletion(collection, objVal.key, next),
                    next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
                        assert.ifError(err);
                        assert.strictEqual(data.Contents.length, 3);
                        const expected = [
                            {
                                key: 'pfx1-test-object',
                                LastModified: '1970-01-01T00:00:00.005Z',
                                dataStoreName: location1,
                                VersionId: expectedVersionIds['pfx1-test-object'],
                            },
                            {
                                key: 'pfx2-test-object',
                                LastModified: '1970-01-01T00:00:02.005Z',
                                dataStoreName: location2,
                                VersionId: expectedVersionIds['pfx2-test-object'],
                            },
                            {
                                key: 'pfx3-test-object',
                                LastModified: '1970-01-01T00:00:01.005Z',
                                dataStoreName: location1,
                                VersionId: expectedVersionIds['pfx3-test-object'],
                            },
                        ];
                        assertContents(data.Contents, expected);
                        return next();
                    }),
                ], done);
            });
        });
    });
});