Compare commits

No commits in common. "development/8.1" and "html-doc" have entirely different histories.

Comparing development/8.1 ... html-doc
@@ -1,6 +0,0 @@
-{
-    "extends": "scality",
-    "parserOptions": {
-        "ecmaVersion": 2020
-    }
-}

@@ -1,10 +0,0 @@
----
-version: 2
-updates:
-  - package-ecosystem: npm
-    directory: "/"
-    schedule:
-      interval: daily
-      time: "13:00"
-    open-pull-requests-limit: 10
-    target-branch: "development/7.4"

@@ -1,25 +0,0 @@
----
-name: codeQL
-
-on:
-  push:
-    branches: [development/*, stabilization/*, hotfix/*]
-  pull_request:
-    branches: [development/*, stabilization/*, hotfix/*]
-  workflow_dispatch:
-
-jobs:
-  analyze:
-    name: Static analysis with CodeQL
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-
-      - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3
-        with:
-          languages: javascript, typescript
-
-      - name: Build and analyze
-        uses: github/codeql-action/analyze@v3

@@ -1,16 +0,0 @@
----
-name: dependency review
-
-on:
-  pull_request:
-    branches: [development/*, stabilization/*, hotfix/*]
-
-jobs:
-  dependency-review:
-    runs-on: ubuntu-latest
-    steps:
-      - name: 'Checkout Repository'
-        uses: actions/checkout@v4
-
-      - name: 'Dependency Review'
-        uses: actions/dependency-review-action@v4

@@ -1,82 +0,0 @@
----
-name: tests
-
-on:
-  push:
-    branches-ignore:
-    - 'development/**'
-
-jobs:
-  test:
-    runs-on: ubuntu-latest
-    services:
-      # Label used to access the service container
-      redis:
-        # Docker Hub image
-        image: redis
-        # Set health checks to wait until redis has started
-        options: >-
-          --health-cmd "redis-cli ping"
-          --health-interval 10s
-          --health-timeout 5s
-          --health-retries 5
-        ports:
-          # Maps port 6379 on service container to the host
-          - 6379:6379
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
-        with:
-          node-version: '16'
-          cache: 'yarn'
-      - name: install dependencies
-        run: yarn install --frozen-lockfile --prefer-offline --network-concurrency 1
-        continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
-      - name: lint yaml
-        run: yarn --silent lint_yml
-      - name: lint javascript
-        run: yarn --silent lint --max-warnings 0
-      - name: lint markdown
-        run: yarn --silent lint_md
-      - name: add hostname
-        run: |
-          sudo sh -c "echo '127.0.0.1 testrequestbucket.localhost' >> /etc/hosts"
-      - name: test and coverage
-        run: yarn --silent coverage
-      - name: run functional tests
-        run: yarn ft_test
-      - uses: codecov/codecov-action@v4
-        with:
-          token: ${{ secrets.CODECOV_TOKEN }}
-      - name: run executables tests
-        run: yarn install && yarn test
-        working-directory: 'lib/executables/pensieveCreds/'
-
-  compile:
-    name: Compile and upload build artifacts
-    needs: test
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-      - name: Install NodeJS
-        uses: actions/setup-node@v4
-        with:
-          node-version: '16'
-          cache: yarn
-      - name: Install dependencies
-        run: yarn install --frozen-lockfile --prefer-offline
-        continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
-      - name: Compile
-        run: yarn build
-        continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
-      - name: Upload artifacts
-        uses: scality/action-artifacts@v4
-        with:
-          url: https://artifacts.scality.net
-          user: ${{ secrets.ARTIFACTS_USER }}
-          password: ${{ secrets.ARTIFACTS_PASSWORD }}
-          source: ./build
-          method: upload
-        if: success()

@@ -1,18 +0,0 @@
-# Logs
-*.log
-
-# Dependency directory
-node_modules/
-*/node_modules/
-
-# Build executables
-*-win.exe
-*-linux
-*-macos
-
-# Coverage
-coverage/
-.nyc_output/
-
-# TypeScript
-build/

.swcrc
@@ -1,12 +0,0 @@
-{
-    "$schema": "https://swc.rs/schema.json",
-    "jsc": {
-        "parser": {
-            "syntax": "typescript"
-        },
-        "target": "es2017"
-    },
-    "module": {
-        "type": "commonjs"
-    }
-}

@@ -1,5 +0,0 @@
-# Contributing rules
-
-Please follow the
-[Contributing Guidelines](
-https://github.com/scality/Guidelines/blob/master/CONTRIBUTING.md).

README.md
@@ -1,150 +0,0 @@
-# Arsenal
-
-[![codecov](https://codecov.io/gh/scality/Arsenal/branch/development/8.1/graph/badge.svg?token=X0esXhJSwb)](https://codecov.io/gh/scality/Arsenal)
-
-Common utilities for the S3 project components
-
-Within this repository, you will be able to find the shared libraries for the
-multiple components making up the whole Project.
-
-* [Guidelines](#guidelines)
-* [Shuffle](#shuffle) to shuffle an array.
-* [Errors](#errors) loads an object of error instances.
-    - [errors/arsenalErrors.json](errors/arsenalErrors.json)
-
-## Guidelines
-
-Please read our coding and workflow guidelines at
-[scality/Guidelines](https://github.com/scality/Guidelines).
-
-### Contributing
-
-In order to contribute, please follow the
-[Contributing Guidelines](
-https://github.com/scality/Guidelines/blob/master/CONTRIBUTING.md).
-
-## Shuffle
-
-### Usage
-
-``` js
-import { shuffle } from 'arsenal';
-
-let array = [1, 2, 3, 4, 5];
-
-shuffle(array);
-
-console.log(array);
-
-//[5, 3, 1, 2, 4]
-```
-
-## Errors
-
-### Usage
-
-``` js
-import { errors } from 'arsenal';
-
-console.log(errors.AccessDenied);
-
-//{ [Error: AccessDenied]
-//    code: 403,
-//    description: 'Access Denied',
-//    AccessDenied: true }
-
-```
-
-## Clustering
-
-The clustering class can be used to set up a cluster of workers. The class
-creates at least one worker and logs any worker event (started, exited).
-The class also provides a watchdog which restarts the workers in case of
-failure until the stop() method is called.
-
-### Usage
-
-#### Simple
-
-```
-import { Clustering } from 'arsenal';
-
-const cluster = new Clustering(clusterSize, logger);
-cluster.start(current => {
-    // Put here the logic of every worker.
-    // 'current' is the Clustering instance, worker id is accessible by
-    // current.getIndex()
-});
-```
-
-The callback will be called every time a worker is started/restarted.
-
-#### Handle exit
-
-```
-import { Clustering } from 'arsenal';
-
-const cluster = new Clustering(clusterSize, logger);
-cluster.start(current => {
-    // Put here the logic of every worker.
-    // 'current' is the Clustering instance, worker id is accessible by
-    // current.getIndex()
-}).onExit(current => {
-    if (current.isMaster()) {
-        // Master process exiting
-    } else {
-        const id = current.getIndex();
-        // Worker (id) exiting
-    }
-});
-```
-
-You can handle the exit event on both master and workers by calling the
-'onExit' method and setting the callback. This allows releasing resources
-or saving state before the process exits.
-
-#### Silencing a signal
-
-```
-import { Clustering } from 'arsenal';
-
-const cluster = new Clustering(clusterSize, logger);
-cluster.start(current => {
-    // Put here the logic of every worker.
-    // 'current' is the Clustering instance, worker id is accessible by
-    // current.getIndex()
-}).onExit((current, signal) => {
-    if (signal !== 'SIGTERM') {
-        process.exit(current.getStatus());
-    }
-});
-```
-
-You can silence stop signals by simply not exiting in the exit callback.
-
-#### Shutdown timeout
-
-```
-import { Clustering } from 'arsenal';
-
-const cluster = new Clustering(clusterSize, logger, 1000);
-cluster.start(current => {
-    // Put here the logic of every worker.
-    // 'current' is the Clustering instance, worker id is accessible by
-    // current.getIndex()
-}).onExit((current, signal) => {
-    if (signal === 'SIGTERM') {
-        // releasing resources
-    }
-});
```
-
-By default, the shutdown timeout is set to 5000 milliseconds. This timeout is
-used only when you explicitly call the stop() method. This window is
-used to let the application release its resources, but if the timeout occurs
-before the application has finished its cleanup, a 'SIGKILL' signal is sent
-to the process (which results in an immediate termination, and this signal
-cannot be caught).
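
Note: to make the stop() path described above concrete, here is a minimal sketch (not part of the original README). The cluster size, the console placeholder for the logger, and the bare stop() call are assumptions for illustration; only Clustering, start(), onExit(), getStatus(), and the shutdown-window behaviour come from the text above.

``` js
import { Clustering } from 'arsenal';

const logger = console; // placeholder; any compatible logger object
const cluster = new Clustering(4, logger, 1000); // 1 s shutdown window

cluster.start(current => {
    // Worker logic goes here.
}).onExit((current, signal) => {
    logger.log(`exit requested by signal ${signal}, status ${current.getStatus()}`);
});

// stop() asks every worker to exit; workers still alive when the
// shutdown window elapses receive an uncatchable SIGKILL.
process.on('SIGTERM', () => cluster.stop());
```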
-
-[badgepub]: https://circleci.com/gh/scality/Arsenal.svg?style=svg
-[badgepriv]: http://ci.ironmann.io/gh/scality/Arsenal.svg?style=svg&circle-token=c3d2570682cba6763a97ea0bc87521941413d75c

@@ -1,6 +0,0 @@
-module.exports = {
-    presets: [
-        ['@babel/preset-env', { targets: { node: 'current' } }],
-        '@babel/preset-typescript',
-    ],
-};

@@ -0,0 +1,17 @@
+<svg xmlns="http://www.w3.org/2000/svg" width="102" height="20">
+  <script/>
+  <linearGradient id="a" x2="0" y2="100%">
+    <stop offset="0" stop-color="#bbb" stop-opacity=".1"/>
+    <stop offset="1" stop-opacity=".1"/>
+  </linearGradient>
+  <rect rx="3" width="102" height="20" fill="#555"/>
+  <rect rx="3" x="64" width="38" height="20" fill="#dab226"/>
+  <path fill="#dab226" d="M64 0h4v20h-4z"/>
+  <rect rx="3" width="102" height="20" fill="url(#a)"/>
+  <g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="11">
+    <text x="32" y="15" fill="#010101" fill-opacity=".3">document</text>
+    <text x="32" y="14">document</text>
+    <text x="82.5" y="15" fill="#010101" fill-opacity=".3">87%</text>
+    <text x="82.5" y="14">87%</text>
+  </g>
+</svg>
(After: 795 B)

circle.yml
@@ -1,28 +0,0 @@
----
-general:
-  branches:
-    ignore:
-      - /^ultron\/.*/   # Ignore ultron/* branches
-
-machine:
-  node:
-    version: 6.13.1
-  services:
-    - redis
-  environment:
-    CXX: g++-4.9
-
-dependencies:
-  override:
-    - rm -rf node_modules
-    - npm install
-    - sudo pip install yamllint
-
-test:
-  override:
-    - npm run --silent lint_yml
-    - npm run --silent lint -- --max-warnings 0
-    - npm run --silent lint_md
-    - npm run --silent test
-    - npm run ft_test
-    - cd lib/executables/pensieveCreds && npm install && npm test

@@ -0,0 +1,19 @@
+{
+  "coverage": "87.23%",
+  "expectCount": 47,
+  "actualCount": 41,
+  "files": {
+    "kinetic/Kinetic.js": {
+      "expectCount": 47,
+      "actualCount": 41,
+      "undocumentLines": [
+        66,
+        13,
+        41,
+        683,
+        15,
+        25
+      ]
+    }
+  }
+}

@@ -0,0 +1,132 @@
+/* Tomorrow Theme */
+/* Original theme - https://github.com/chriskempson/tomorrow-theme */
+/* Pretty printing styles. Used with prettify.js. */
+/* SPAN elements with the classes below are added by prettyprint. */
+/* plain text */
+.pln {
+  color: #4d4d4c; }
+
+@media screen {
+  /* string content */
+  .str {
+    color: #718c00; }
+
+  /* a keyword */
+  .kwd {
+    color: #8959a8; }
+
+  /* a comment */
+  .com {
+    color: #8e908c; }
+
+  /* a type name */
+  .typ {
+    color: #4271ae; }
+
+  /* a literal value */
+  .lit {
+    color: #f5871f; }
+
+  /* punctuation */
+  .pun {
+    color: #4d4d4c; }
+
+  /* lisp open bracket */
+  .opn {
+    color: #4d4d4c; }
+
+  /* lisp close bracket */
+  .clo {
+    color: #4d4d4c; }
+
+  /* a markup tag name */
+  .tag {
+    color: #c82829; }
+
+  /* a markup attribute name */
+  .atn {
+    color: #f5871f; }
+
+  /* a markup attribute value */
+  .atv {
+    color: #3e999f; }
+
+  /* a declaration */
+  .dec {
+    color: #f5871f; }
+
+  /* a variable name */
+  .var {
+    color: #c82829; }
+
+  /* a function name */
+  .fun {
+    color: #4271ae; } }
+/* Use higher contrast and text-weight for printable form. */
+@media print, projection {
+  .str {
+    color: #060; }
+
+  .kwd {
+    color: #006;
+    font-weight: bold; }
+
+  .com {
+    color: #600;
+    font-style: italic; }
+
+  .typ {
+    color: #404;
+    font-weight: bold; }
+
+  .lit {
+    color: #044; }
+
+  .pun, .opn, .clo {
+    color: #440; }
+
+  .tag {
+    color: #006;
+    font-weight: bold; }
+
+  .atn {
+    color: #404; }
+
+  .atv {
+    color: #060; } }
+/* Style */
+/*
+pre.prettyprint {
+  background: white;
+  font-family: Consolas, Monaco, 'Andale Mono', monospace;
+  font-size: 12px;
+  line-height: 1.5;
+  border: 1px solid #ccc;
+  padding: 10px; }
+*/
+
+/* Specify class=linenums on a pre to get line numbering */
+ol.linenums {
+  margin-top: 0;
+  margin-bottom: 0; }
+
+/* IE indents via margin-left */
+li.L0,
+li.L1,
+li.L2,
+li.L3,
+li.L4,
+li.L5,
+li.L6,
+li.L7,
+li.L8,
+li.L9 {
+  /* */ }
+
+/* Alternate shading for lines */
+li.L1,
+li.L3,
+li.L5,
+li.L7,
+li.L9 {
+  /* */ }

@@ -0,0 +1,944 @@
+@import url(https://fonts.googleapis.com/css?family=Roboto:400,300,700);
+
+* {
+  margin: 0;
+  padding: 0;
+  text-decoration: none;
+}
+
+html
+{
+  font-family: 'Roboto', sans-serif;
+  overflow: auto;
+  font-size: 14px;
+  /*color: #4d4e53;*/
+  color: rgba(0, 0, 0, .68);
+  background-color: #fff;
+}
+
+a {
+  /*color: #0095dd;*/
+  /*color:rgb(37, 138, 175);*/
+  color: #039BE5;
+}
+
+code a:hover {
+  text-decoration: underline;
+}
+
+ul, ol {
+  padding-left: 20px;
+}
+
+ul li {
+  list-style: disc;
+  margin: 4px 0;
+}
+
+ol li {
+  margin: 4px 0;
+}
+
+h1 {
+  margin-bottom: 10px;
+  font-size: 34px;
+  font-weight: 300;
+  border-bottom: solid 1px #ddd;
+}
+
+h2 {
+  margin-top: 24px;
+  margin-bottom: 10px;
+  font-size: 20px;
+  border-bottom: solid 1px #ddd;
+  font-weight: 300;
+}
+
+h3 {
+  position: relative;
+  font-size: 16px;
+  margin-bottom: 12px;
+  background-color: #E2E2E2;
+  padding: 4px;
+  font-weight: 300;
+}
+
+del {
+  text-decoration: line-through;
+}
+
+p {
+  margin-bottom: 15px;
+  line-height: 1.5;
+}
+
+p > code {
+  background-color: #f5f5f5;
+  border-radius: 3px;
+}
+
+pre > code {
+  display: block;
+}
+
+pre.prettyprint, pre > code {
+  padding: 4px;
+  margin: 1em 0;
+  background-color: #f5f5f5;
+  border-radius: 3px;
+}
+
+pre.prettyprint > code {
+  margin: 0;
+}
+
+p > code,
+li > code {
+  padding: 0 4px;
+  border-radius: 3px;
+}
+
+.import-path pre.prettyprint,
+.import-path pre.prettyprint code {
+  margin: 0;
+  padding: 0;
+  border: none;
+  background: white;
+}
+
+.layout-container {
+  /*display: flex;*/
+  /*flex-direction: row;*/
+  /*justify-content: flex-start;*/
+  /*align-items: stretch;*/
+}
+
+.layout-container > header {
+  height: 40px;
+  line-height: 40px;
+  font-size: 16px;
+  padding: 0 10px;
+  margin: 0;
+  position: fixed;
+  width: 100%;
+  z-index: 1;
+  background-color: white;
+  top: 0;
+  border-bottom: solid 1px #E02130;
+}
+.layout-container > header > a {
+  margin: 0 5px;
+}
+
+.layout-container > header > a.repo-url-github {
+  font-size: 0;
+  display: inline-block;
+  width: 20px;
+  height: 38px;
+  background: url("../image/github.png") no-repeat center;
+  background-size: 20px;
+  vertical-align: top;
+}
+
+.navigation {
+  position: fixed;
+  top: 0;
+  left: 0;
+  box-sizing: border-box;
+  width: 250px;
+  height: 100%;
+  padding-top: 40px;
+  padding-left: 15px;
+  padding-bottom: 2em;
+  margin-top: 1em;
+  overflow-x: scroll;
+  box-shadow: rgba(255, 255, 255, 1) -1px 0 0 inset;
+  border-right: 1px solid rgba(0, 0, 0, 0.1);
+}
+
+.navigation ul {
+  padding: 0;
+}
+
+.navigation li {
+  list-style: none;
+  margin: 4px 0;
+  white-space: nowrap;
+}
+
+.navigation .nav-dir-path {
+  margin-top: 0.7em;
+  margin-bottom: 0.25em;
+  font-size: 0.8em;
+  color: #aaa;
+}
+
+.kind-class,
+.kind-interface,
+.kind-function,
+.kind-typedef,
+.kind-variable,
+.kind-external {
+  margin-left: 0.75em;
+  width: 1.2em;
+  height: 1.2em;
+  display: inline-block;
+  text-align: center;
+  border-radius: 0.2em;
+  margin-right: 0.2em;
+  font-weight: bold;
+}
+
+.kind-class {
+  color: #009800;
+  background-color: #bfe5bf;
+}
+
+.kind-interface {
+  color: #fbca04;
+  background-color: #fef2c0;
+}
+
+.kind-function {
+  color: #6b0090;
+  background-color: #d6bdde;
+}
+
+.kind-variable {
+  color: #eb6420;
+  background-color: #fad8c7;
+}
+
+.kind-typedef {
+  color: #db001e;
+  background-color: #edbec3;
+}
+
+.kind-external {
+  color: #0738c3;
+  background-color: #bbcbea;
+}
+
+h1 .version,
+h1 .url a {
+  font-size: 14px;
+  color: #aaa;
+}
+
+.content {
+  margin-top: 40px;
+  margin-left: 250px;
+  padding: 10px 50px 10px 20px;
+}
+
+.header-notice {
+  font-size: 14px;
+  color: #aaa;
+  margin: 0;
+}
+
+.expression-extends .prettyprint {
+  margin-left: 10px;
+  background: white;
+}
+
+.extends-chain {
+  border-bottom: 1px solid #ddd;
+  padding-bottom: 10px;
+  margin-bottom: 10px;
+}
+
+.extends-chain span:nth-of-type(1) {
+  padding-left: 10px;
+}
+
+.extends-chain > div {
+  margin: 5px 0;
+}
+
+.description table {
+  font-size: 14px;
+  border-spacing: 0;
+  border: 0;
+  border-collapse: collapse;
+}
+
+.description thead {
+  background: #999;
+  color: white;
+}
+
+.description table td,
+.description table th {
+  border: solid 1px #ddd;
+  padding: 4px;
+  font-weight: normal;
+}
+
+.flat-list ul {
+  padding-left: 0;
+}
+
+.flat-list li {
+  display: inline;
+  list-style: none;
+}
+
+table.summary {
+  width: 100%;
+  margin: 10px 0;
+  border-spacing: 0;
+  border: 0;
+  border-collapse: collapse;
+}
+
+table.summary thead {
+  background: #999;
+  color: white;
+}
+
+table.summary td {
+  border: solid 1px #ddd;
+  padding: 4px 10px;
+}
+
+table.summary tbody td:nth-child(1) {
+  text-align: right;
+  white-space: nowrap;
+  min-width: 64px;
+  vertical-align: top;
+}
+
+table.summary tbody td:nth-child(2) {
+  width: 100%;
+  border-right: none;
+}
+
+table.summary tbody td:nth-child(3) {
+  white-space: nowrap;
+  border-left: none;
+  vertical-align: top;
+}
+
+table.summary td > div:nth-of-type(2) {
+  padding-top: 4px;
+  padding-left: 15px;
+}
+
+table.summary td p {
+  margin-bottom: 0;
+}
+
+.inherited-summary thead td {
+  padding-left: 2px;
+}
+
+.inherited-summary thead a {
+  color: white;
+}
+
+.inherited-summary .summary tbody {
+  display: none;
+}
+
+.inherited-summary .summary .toggle {
+  padding: 0 4px;
+  font-size: 12px;
+  cursor: pointer;
+}
+.inherited-summary .summary .toggle.closed:before {
+  content: "▶";
+}
+.inherited-summary .summary .toggle.opened:before {
+  content: "▼";
+}
+
+.member, .method {
+  margin-bottom: 24px;
+}
+
+table.params {
+  width: 100%;
+  margin: 10px 0;
+  border-spacing: 0;
+  border: 0;
+  border-collapse: collapse;
+}
+
+table.params thead {
+  background: #eee;
+  color: #aaa;
+}
+
+table.params td {
+  padding: 4px;
+  border: solid 1px #ddd;
+}
+
+table.params td p {
+  margin: 0;
+}
+
+.content .detail > * {
+  margin: 15px 0;
+}
+
+.content .detail > h3 {
+  color: black;
+}
+
+.content .detail > div {
+  margin-left: 10px;
+}
+
+.content .detail > .import-path {
+  margin-top: -8px;
+}
+
+.content .detail + .detail {
+  margin-top: 30px;
+}
+
+.content .detail .throw td:first-child {
+  padding-right: 10px;
+}
+
+.content .detail h4 + :not(pre) {
+  padding-left: 0;
+  margin-left: 10px;
+}
+
+.content .detail h4 + ul li {
+  list-style: none;
+}
+
+.return-param * {
+  display: inline;
+}
+
+.argument-params {
+  margin-bottom: 20px;
+}
+
+.return-type {
+  padding-right: 10px;
+  font-weight: normal;
+}
+
+.return-desc {
+  margin-left: 10px;
+  margin-top: 4px;
+}
+
+.return-desc p {
+  margin: 0;
+}
+
+.deprecated, .experimental, .instance-docs {
+  border-left: solid 5px orange;
+  padding-left: 4px;
+  margin: 4px 0;
+}
+
+tr.listen p,
+tr.throw p,
+tr.emit p {
+  margin-bottom: 10px;
+}
+
+.version, .since {
+  color: #aaa;
+}
+
+h3 .right-info {
+  position: absolute;
+  right: 4px;
+  font-size: 14px;
+}
+
+.version + .since:before {
+  content: '| ';
+}
+
+.see {
+  margin-top: 10px;
+}
+
+.see h4 {
+  margin: 4px 0;
+}
+
+.content .detail h4 + .example-doc {
+  margin: 6px 0;
+}
+
+.example-caption {
+  position: relative;
+  bottom: -1px;
+  display: inline-block;
+  padding: 4px;
+  font-style: italic;
+  background-color: #f5f5f5;
+  font-weight: bold;
+  border-radius: 3px;
+  border-bottom-left-radius: 0;
+  border-bottom-right-radius: 0;
+}
+
+.example-caption + pre.source-code {
+  margin-top: 0;
+  border-top-left-radius: 0;
+}
+
+footer, .file-footer {
+  text-align: right;
+  font-style: italic;
+  font-weight: 100;
+  font-size: 13px;
+  margin-right: 50px;
+  margin-left: 270px;
+  border-top: 1px solid #ddd;
+  padding-top: 30px;
+  margin-top: 20px;
+  padding-bottom: 10px;
+}
+
+pre.source-code {
+  background: #f5f5f5;
+  padding: 4px;
+}
+
+pre.raw-source-code > code {
+  padding: 0;
+  margin: 0;
+}
+
+pre.source-code.line-number {
+  padding: 0;
+}
+
+pre.source-code ol {
+  background: #eee;
+  padding-left: 40px;
+}
+
+pre.source-code li {
+  background: white;
+  padding-left: 4px;
+  list-style: decimal;
+  margin: 0;
+}
+
+pre.source-code.line-number li.active {
+  background: rgb(255, 255, 150);
+}
+
+pre.source-code.line-number li.error-line {
+  background: #ffb8bf;
+}
+
+table.files-summary {
+  width: 100%;
+  margin: 10px 0;
+  border-spacing: 0;
+  border: 0;
+  border-collapse: collapse;
+  text-align: right;
+}
+
+table.files-summary tbody tr:hover {
+  background: #eee;
+}
+
+table.files-summary td:first-child,
+table.files-summary td:nth-of-type(2) {
+  text-align: left;
+}
+
+table.files-summary[data-use-coverage="false"] td.coverage {
+  display: none;
+}
+
+table.files-summary thead {
+  background: #999;
+  color: white;
+}
+
+table.files-summary td {
+  border: solid 1px #ddd;
+  padding: 4px 10px;
+  vertical-align: top;
+}
+
+table.files-summary td.identifiers > span {
+  display: block;
+  margin-top: 4px;
+}
+table.files-summary td.identifiers > span:first-child {
+  margin-top: 0;
+}
+
+table.files-summary .coverage-count {
+  font-size: 12px;
+  color: #aaa;
+  display: inline-block;
+  min-width: 40px;
+}
+
+.total-coverage-count {
+  position: relative;
+  bottom: 2px;
+  font-size: 12px;
+  color: #666;
+  font-weight: 500;
+  padding-left: 5px;
+}
+
+table.test-summary thead {
+  background: #999;
+  color: white;
+}
+
+table.test-summary thead .test-description {
+  width: 50%;
+}
+
+table.test-summary {
+  width: 100%;
+  margin: 10px 0;
+  border-spacing: 0;
+  border: 0;
+  border-collapse: collapse;
+}
+
+table.test-summary thead .test-count {
+  width: 3em;
+}
+
+table.test-summary tbody tr:hover {
+  background-color: #eee;
+}
+
+table.test-summary td {
+  border: solid 1px #ddd;
+  padding: 4px 10px;
+  vertical-align: top;
+}
+
+table.test-summary td p {
+  margin: 0;
+}
+
+table.test-summary tr.test-describe .toggle {
+  display: inline-block;
+  float: left;
+  margin-right: 4px;
+  cursor: pointer;
+}
+
+table.test-summary tr.test-describe .toggle.opened:before {
+  content: '▼';
+}
+
+table.test-summary tr.test-describe .toggle.closed:before {
+  content: '▶';
+}
+
+table.test-summary .test-target > span {
+  display: block;
+  margin-top: 4px;
+}
+table.test-summary .test-target > span:first-child {
+  margin-top: 0;
+}
+
+.inner-link-active {
+  background: rgb(255, 255, 150);
+}
+
+/* search box */
+.search-box {
+  position: absolute;
+  top: 10px;
+  right: 50px;
+  padding-right: 8px;
+  padding-bottom: 10px;
+  line-height: normal;
+  font-size: 12px;
+}
+
+.search-box img {
+  width: 20px;
+  vertical-align: top;
+}
+
+.search-input {
+  display: inline;
+  visibility: hidden;
+  width: 0;
+  padding: 2px;
+  height: 1.5em;
+  outline: none;
+  background: transparent;
+  border: 1px #0af;
+  border-style: none none solid none;
+  vertical-align: bottom;
+}
+
+.search-input-edge {
+  display: none;
+  width: 1px;
+  height: 5px;
+  background-color: #0af;
+  vertical-align: bottom;
+}
+
+.search-result {
+  position: absolute;
+  display: none;
+  height: 600px;
+  width: 100%;
+  padding: 0;
+  margin-top: 5px;
+  margin-left: 24px;
+  background: white;
+  box-shadow: 1px 1px 4px rgb(0,0,0);
+  white-space: nowrap;
+  overflow-y: scroll;
+}
+
+.search-result-import-path {
+  color: #aaa;
+  font-size: 12px;
+}
+
+.search-result li {
+  list-style: none;
+  padding: 2px 4px;
+}
+
+.search-result li a {
+  display: block;
+}
+
+.search-result li.selected {
+  background: #ddd;
+}
+
+.search-result li.search-separator {
+  background: rgb(37, 138, 175);
+  color: white;
+}
+
+.search-box.active .search-input {
+  visibility: visible;
+  transition: width 0.2s ease-out;
+  width: 300px;
+}
+
+.search-box.active .search-input-edge {
+  display: inline-block;
+}
+
+/* coverage badge */
+.esdoc-coverage {
+  display: inline-block;
+  height: 20px;
+  vertical-align: top;
+}
+
+h1 .esdoc-coverage {
+  position: relative;
+  top: -4px;
+}
+
+.esdoc-coverage-wrap {
+  color: white;
+  font-size: 12px;
+  font-weight: 500;
+}
+
+.esdoc-coverage-label {
+  padding: 3px 4px 3px 6px;
+  background: linear-gradient(to bottom, #5e5e5e 0%,#4c4c4c 100%);
+  border-radius: 4px 0 0 4px;
+  display: inline-block;
+  height: 20px;
+  box-sizing: border-box;
+  line-height: 14px;
+}
+
+.esdoc-coverage-ratio {
+  padding: 3px 6px 3px 4px;
+  border-radius: 0 4px 4px 0;
+  display: inline-block;
+  height: 20px;
+  box-sizing: border-box;
+  line-height: 14px;
+}
+
+.esdoc-coverage-low {
+  background: linear-gradient(to bottom, #db654f 0%,#c9533d 100%);
+}
+
+.esdoc-coverage-middle {
+  background: linear-gradient(to bottom, #dab226 0%,#c9a179 100%);
+}
+
+.esdoc-coverage-high {
+  background: linear-gradient(to bottom, #4fc921 0%,#3eb810 100%);
+}
+
+.github-markdown .manual-toc {
+  padding-left: 0;
+}
+
+/** manual */
+
+.manual-root .navigation {
+  padding-left: 0;
+}
+
+.navigation .manual-toc-title {
+  margin: 0;
+  padding: 0.5em 0 0.5em 1em;
+  border: none;
+  font-size: 1em;
+  font-weight: normal;
+}
+
+.navigation .manual-toc-title:first-child {
+  margin-top: 0;
+}
+
+.navigation .manual-toc {
+  display: none;
+  margin-left: 0.5em;
+  margin-top: -0.25em;
+}
+
+.github-markdown .manual-toc-title a {
+  color: inherit;
+}
+
+.manual-breadcrumb-list {
+  font-size: 0.8em;
+  margin-bottom: 1em;
+}
+
+.manual-toc-title a:hover {
+  color: #039BE5;
+}
+
+.manual-toc li {
+  margin: 0.75em 0;
+  list-style-type: none;
+}
+
+.manual-toc .indent-h1 {
+  margin-left: 0;
+}
+.manual-toc .indent-h2 {
+  margin-left: 1em;
+}
+.manual-toc .indent-h3 {
+  margin-left: 3em;
+}
+.manual-toc .indent-h4 {
+  margin-left: 4em;
+}
+.manual-toc .indent-h5 {
+  margin-left: 5em;
+}
+
+.manual-nav li {
+  margin: 0.75em 0;
+}
+
+.manual-dot {
+  margin-left: 0.75em;
+  width: 0.6em;
+  height: 0.6em;
+  display: inline-block;
+  border-radius: 0.3em;
+  margin-right: 0.3em;
+  background-color: #bfe5bf;
+}
+
+/* github markdown */
+.github-markdown {
+  font-size: 16px;
+}
+
+.github-markdown h1,
+.github-markdown h2,
+.github-markdown h3,
+.github-markdown h4,
+.github-markdown h5 {
+  margin-top: 1em;
+  margin-bottom: 16px;
+  font-weight: bold;
+  padding: 0;
+}
+
+.github-markdown h1:nth-of-type(1) {
+  margin-top: 0;
+}
+
+.github-markdown h1 {
+  font-size: 2em;
+  padding-bottom: 0.3em;
+}
+
+.github-markdown h2 {
+  font-size: 1.75em;
+  padding-bottom: 0.3em;
+}
+
+.github-markdown h3 {
+  font-size: 1.5em;
+  background-color: transparent;
+}
+
+.github-markdown h4 {
+  font-size: 1.25em;
+}
+
+.github-markdown h5 {
+  font-size: 1em;
+}
+
+.github-markdown ul, .github-markdown ol {
+  padding-left: 2em;
+}
+
+.github-markdown pre > code {
+  font-size: 0.85em;
+}
+
+.github-markdown table {
+  margin-bottom: 1em;
+  border-collapse: collapse;
+  border-spacing: 0;
+}
+
+.github-markdown table tr {
+  background-color: #fff;
+  border-top: 1px solid #ccc;
+}
+
+.github-markdown table th,
+.github-markdown table td {
+  padding: 6px 13px;
+  border: 1px solid #ddd;
+}
+
+.github-markdown table tr:nth-child(2n) {
+  background-color: #f8f8f8;
+}
+
+/** badge(.svg) does not have border */
+.github-markdown img:not([src*=".svg"]) {
+  max-width: 100%;
+  box-shadow: 1px 1px 1px rgba(0,0,0,0.5);
+}

@@ -1,260 +0,0 @@
-# BucketInfo Model Version History
-
-## Model Version 0/1
-
-### Properties
-
-```javascript
-this._acl = aclInstance;
-this._name = name;
-this._owner = owner;
-this._ownerDisplayName = ownerDisplayName;
-this._creationDate = creationDate;
-```
-
-### Usage
-
-No explicit references in the code, since the mdBucketModelVersion
-property was not added until Model Version 2.
-
-## Model Version 2
-
-### Properties Added
-
-```javascript
-this._mdBucketModelVersion = mdBucketModelVersion || 0;
-this._transient = transient || false;
-this._deleted = deleted || false;
-```
-
-### Usage
-
-Used to determine which splitter to use (< 2 means the old splitter).
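
Note: a hypothetical illustration of the rule just described, not an excerpt from this repository. The splitter values and the getMdBucketModelVersion() accessor are assumptions for the example; only the "< 2" comparison comes from the document above.

```javascript
// Hypothetical splitter values; the real ones live in Arsenal's constants.
const OLD_SPLITTER = '#';     // assumed, for illustration only
const NEW_SPLITTER = '..|..'; // assumed, for illustration only

function splitterFor(bucket) {
    // _mdBucketModelVersion defaults to 0, so buckets created before
    // Model Version 2 keep the old splitter.
    return bucket.getMdBucketModelVersion() < 2 ? OLD_SPLITTER : NEW_SPLITTER;
}
```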
-
-## Model Version 3
-
-### Properties Added
-
-```javascript
-this._serverSideEncryption = serverSideEncryption || null;
-```
-
-### Usage
-
-Used to store the server-side bucket encryption info
-
-## Model Version 4
-
-### Properties Added
-
-```javascript
-this._locationConstraint = LocationConstraint || null;
-```
-
-### Usage
-
-Used to store the location constraint of the bucket
-
-## Model Version 5
-
-### Properties Added
-
-```javascript
-this._websiteConfiguration = websiteConfiguration || null;
-this._cors = cors || null;
-```
-
-### Usage
-
-Used to store the bucket website configuration info
-and to store CORS rules to apply to cross-domain requests
-
-## Model Version 6
-
-### Properties Added
-
-```javascript
-this._lifecycleConfiguration = lifecycleConfiguration || null;
-```
-
-### Usage
-
-Used to store the bucket lifecycle configuration info
-
-## Model Version 7
-
-### Properties Added
-
-```javascript
-this._uid = uid || uuid();
-```
-
-### Usage
-
-Used to set a unique identifier on a bucket
-
-## Model Version 8
-
-### Properties Added
-
-```javascript
-this._readLocationConstraint = readLocationConstraint || null;
-```
-
-### Usage
-
-Used to store the default read location of the bucket
-
-## Model Version 9
-
-### Properties Added
-
-```javascript
-this._isNFS = isNFS || null;
-```
-
-### Usage
-
-Used to determine whether the bucket may be accessed through NFS
-
-## Model Version 10
-
-### Properties Added
-
-```javascript
-this._ingestion = ingestionConfig || null;
-```
-
-### Usage
-
-Used to store the ingestion status of a bucket
-
-## Model Version 11
-
-### Properties Added
-
-```javascript
-this._azureInfo = azureInfo || null;
-```
-
-### Usage
-
-Used to store Azure storage account specific information
-
-## Model Version 12
-
-### Properties Added
-
-```javascript
-this._objectLockEnabled = objectLockEnabled || false;
-this._objectLockConfiguration = objectLockConfiguration || null;
-```
-
-### Usage
-
-Used to determine whether object lock capabilities are enabled on a bucket and
-to store the object lock configuration of the bucket
-
-## Model Version 13
-
-### Properties Added
-
-```javascript
-this._notificationConfiguration = notificationConfiguration || null;
-```
-
-### Usage
-
-Used to store the bucket notification configuration info
-
-## Model Version 14
-
-### Properties Added
-
-```javascript
-this._serverSideEncryption.configuredMasterKeyId = configuredMasterKeyId || undefined;
-```
-
-### Usage
-
-Used to store the user's configured KMS key ID
-
-## Model Version 15
-
-### Properties Added
-
-```javascript
-this._tags = tags || null;
-```
-
-The Tag Set of a bucket is an array of objects with Key and Value:
-
-```javascript
-[
-    {
-        Key: 'something',
-        Value: 'some_data'
-    }
-]
-```
-
-## Model Version 16
-
-### Properties Added
-
-```javascript
-this._capabilities = capabilities || undefined;
-```
-
-For capacity-enabled buckets, contains the following data:
-
-```javascript
-{
-    _capabilities: {
-        VeeamSOSApi?: {
-            SystemInfo?: {
-                ProtocolVersion: String,
-                ModelName: String,
-                ProtocolCapabilities: {
-                    CapacityInfo: Boolean,
-                    UploadSessions: Boolean,
-                    IAMSTS: Boolean,
-                },
-                APIEndpoints: {
-                    IAMEndpoint: String,
-                    STSEndpoint: String,
-                },
-                SystemRecommendations?: {
-                    S3ConcurrentTaskLimit: Number,
-                    S3MultiObjectDelete: Number,
-                    StorageCurrentTasksLimit: Number,
-                    KbBlockSize: Number,
-                },
-                LastModified?: String,
-            },
-            CapacityInfo?: {
-                Capacity: Number,
-                Available: Number,
-                Used: Number,
-                LastModified?: String,
-            },
-        }
-    },
-}
-```
-
-### Usage
-
-Used to store bucket tagging
-
-## Model Version 17
-
-### Properties Added
-
-```javascript
-this._quotaMax = quotaMax || 0;
-```
-
-### Usage
-
-Used to store the bucket quota

@@ -1,27 +0,0 @@
-# Delimiter
-
-The Delimiter class handles raw listings from the database with an
-optional delimiter, and fills in a curated listing with "Contents" and
-"CommonPrefixes" as a result.
-
-## Expected Behavior
-
-- only lists keys belonging to the given **prefix** (if provided)
-
-- groups listed keys that have a common prefix ending with a delimiter
-  inside CommonPrefixes
-
-- can take a **marker** or **continuationToken** to list from a specific key
-
-- can take a **maxKeys** parameter to limit how many keys can be returned
-
-## State Chart
-
-- States with grey background are *Idle* states, which are waiting for
-  a new listing key
-
-- States with blue background are *Processing* states, which are
-  actively processing a new listing key passed by the filter()
-  function
-
-![Delimiter State Chart](./pics/delimiterStateChart.svg)
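
Note: for orientation, a minimal sketch of how such a listing class is typically driven, one filter() call per raw database entry as described above. The constructor parameters, the console placeholder for the logger, the rawEntries source, the result() call, and the negative return code for ending the listing are assumptions for illustration, not an excerpt from this repository.

``` js
const { algorithms } = require('arsenal');

const logger = console; // placeholder logger
const rawEntries = [];  // placeholder: { key, value } pairs from the database

// Curate the raw key/value stream into Contents + CommonPrefixes.
const listing = new algorithms.list.Delimiter(
    { prefix: 'photos/', delimiter: '/', maxKeys: 1000 }, logger);

for (const entry of rawEntries) {
    // filter() tells the caller to accept, skip ahead, or end the listing.
    if (listing.filter(entry) < 0) {
        break; // listing signalled the end (e.g. maxKeys reached)
    }
}

const { Contents, CommonPrefixes } = listing.result();
```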
@@ -1,45 +0,0 @@
-# DelimiterMaster
-
-The DelimiterMaster class handles raw listings from the database of a
-versioned or non-versioned bucket with an optional delimiter, and
-fills in a curated listing with "Contents" and "CommonPrefixes" as a
-result.
-
-## Expected Behavior
-
-- only lists latest versions of versioned buckets
-
-- only lists keys belonging to the given **prefix** (if provided)
-
-- does not list latest versions that are delete markers
-
-- groups listed keys that have a common prefix ending with a delimiter
-  inside CommonPrefixes
-
-- can take a **marker** or **continuationToken** to list from a specific key
-
-- can take a **maxKeys** parameter to limit how many keys can be returned
-
-- reconciles internal PHD keys with the next version (those are
-  created when a specific version that is the latest version is
-  deleted)
-
-- skips internal keys like replay keys
-
-## State Chart
-
-- States with grey background are *Idle* states, which are waiting for
-  a new listing key
-
-- States with blue background are *Processing* states, which are
-  actively processing a new listing key passed by the filter()
-  function
-
-### Bucket Vformat=v0
-
-![DelimiterMaster State Chart for v0 format](./pics/delimiterMasterV0StateChart.svg)
-
-### Bucket Vformat=v1
-
-For buckets in versioning key format **v1**, the algorithm used is the
-one from [Delimiter](delimiter.md).

@@ -1,33 +0,0 @@
-# DelimiterVersions
-
-The DelimiterVersions class handles raw listings from the database of a
-versioned or non-versioned bucket with an optional delimiter, and
-fills in a curated listing with "Versions" and "CommonPrefixes" as a
-result.
-
-## Expected Behavior
-
-- lists individual distinct versions of versioned buckets
-
-- only lists keys belonging to the given **prefix** (if provided)
-
-- groups listed keys that have a common prefix ending with a delimiter
-  inside CommonPrefixes
-
-- can take a **keyMarker** and optionally a **versionIdMarker** to
-  list from a specific key or version
-
-- can take a **maxKeys** parameter to limit how many keys can be returned
-
-- skips internal keys like replay keys
-
-## State Chart
-
-- States with grey background are *Idle* states, which are waiting for
-  a new listing key
-
-- States with blue background are *Processing* states, which are
-  actively processing a new listing key passed by the filter()
-  function
-
-![DelimiterVersions State Chart](./pics/delimiterVersionsStateChart.svg)

@@ -1,45 +0,0 @@
-digraph {
-    node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
-    edge [fontsize=14];
-    rankdir=TB;
-
-    START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
-    END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]
-
-    node [fillcolor="lightgrey"];
-    "NotSkippingPrefixNorVersions.Idle" [label="NotSkippingPrefixNorVersions",group="NotSkippingPrefixNorVersions",width=4];
-    "SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];
-    "SkippingVersions.Idle" [label="SkippingVersions",group="SkippingVersions"];
-    "WaitVersionAfterPHD.Idle" [label="WaitVersionAfterPHD",group="WaitVersionAfterPHD"];
-
-    node [fillcolor="lightblue"];
-    "NotSkippingPrefixNorVersions.Processing" [label="NotSkippingPrefixNorVersions",group="NotSkippingPrefixNorVersions",width=4];
-    "SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];
-    "SkippingVersions.Processing" [label="SkippingVersions",group="SkippingVersions"];
-    "WaitVersionAfterPHD.Processing" [label="WaitVersionAfterPHD",group="WaitVersionAfterPHD"];
-
-    START -> "SkippingVersions.Idle" [label="[marker != undefined]"]
-    START -> "NotSkippingPrefixNorVersions.Idle" [label="[marker == undefined]"]
-
-    "NotSkippingPrefixNorVersions.Idle" -> "NotSkippingPrefixNorVersions.Processing" [label="filter(key, value)"]
-    "SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]
-    "SkippingVersions.Idle" -> "SkippingVersions.Processing" [label="filter(key, value)"]
-    "WaitVersionAfterPHD.Idle" -> "WaitVersionAfterPHD.Processing" [label="filter(key, value)"]
-
-    "NotSkippingPrefixNorVersions.Processing" -> "SkippingVersions.Idle" [label="[Version.isDeleteMarker(value)]\n-> FILTER_ACCEPT"]
-    "NotSkippingPrefixNorVersions.Processing" -> "WaitVersionAfterPHD.Idle" [label="[Version.isPHD(value)]\n-> FILTER_ACCEPT"]
-    "NotSkippingPrefixNorVersions.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(<ReplayPrefix>)]\n/ prefix <- <ReplayPrefix>\n-> FILTER_SKIP"]
-    "NotSkippingPrefixNorVersions.Processing" -> END [label="[isListableKey(key, value) and\nnKeys == maxKeys]\n-> FILTER_END"]
-    "NotSkippingPrefixNorVersions.Processing" -> "SkippingPrefix.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nhasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
-    "NotSkippingPrefixNorVersions.Processing" -> "SkippingVersions.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nnot hasDelimiter(key)]\n/ Contents.append(key, value)\n-> FILTER_ACCEPT"]
-
-    "SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
-    "SkippingPrefix.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not key.startsWith(prefix)]"]
-
-    "SkippingVersions.Processing" -> "SkippingVersions.Idle" [label="[isVersionKey(key)]\n-> FILTER_SKIP"]
-    "SkippingVersions.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not isVersionKey(key)]"]
-
-    "WaitVersionAfterPHD.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[isVersionKey(key) and master(key) == PHDkey]\n/ key <- master(key)"]
-    "WaitVersionAfterPHD.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not isVersionKey(key) or master(key) != PHDkey]"]
-}

@@ -1,216 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
- "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
-<!-- Generated by graphviz version 2.43.0 (0)
- -->
-<!-- Title: %3 Pages: 1 -->
-<svg width="2313pt" height="460pt"
- viewBox="0.00 0.00 2313.37 460.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">

[The rest of this 216-line file is the Graphviz-generated SVG rendering of the state chart defined in the .dot source above (the Idle and Processing state nodes and their labelled transitions); the capture is truncated partway through the node definitions.]
|
|
||||||
<path fill="lightgrey" stroke="black" d="M1209.37,-138C1209.37,-138 1017.37,-138 1017.37,-138 1011.37,-138 1005.37,-132 1005.37,-126 1005.37,-126 1005.37,-114 1005.37,-114 1005.37,-108 1011.37,-102 1017.37,-102 1017.37,-102 1209.37,-102 1209.37,-102 1215.37,-102 1221.37,-108 1221.37,-114 1221.37,-114 1221.37,-126 1221.37,-126 1221.37,-132 1215.37,-138 1209.37,-138"/>
|
|
||||||
<text text-anchor="middle" x="1113.37" y="-116.2" font-family="Times,serif" font-size="16.00">SkippingPrefix</text>
|
|
||||||
</g>
|
|
||||||
<!-- SkippingPrefix.Processing -->
|
|
||||||
<g id="node8" class="node">
|
|
||||||
<title>SkippingPrefix.Processing</title>
|
|
||||||
<path fill="lightblue" stroke="black" d="M1070.37,-36C1070.37,-36 878.37,-36 878.37,-36 872.37,-36 866.37,-30 866.37,-24 866.37,-24 866.37,-12 866.37,-12 866.37,-6 872.37,0 878.37,0 878.37,0 1070.37,0 1070.37,0 1076.37,0 1082.37,-6 1082.37,-12 1082.37,-12 1082.37,-24 1082.37,-24 1082.37,-30 1076.37,-36 1070.37,-36"/>
|
|
||||||
<text text-anchor="middle" x="974.37" y="-14.2" font-family="Times,serif" font-size="16.00">SkippingPrefix</text>
|
|
||||||
</g>
|
|
||||||
<!-- SkippingPrefix.Idle->SkippingPrefix.Processing -->
|
|
||||||
<g id="edge4" class="edge">
|
|
||||||
<title>SkippingPrefix.Idle->SkippingPrefix.Processing</title>
|
|
||||||
<path fill="none" stroke="black" d="M1011.89,-101.96C994.96,-97.13 981.04,-91.17 975.37,-84 967.11,-73.56 966.25,-58.93 967.72,-46.2"/>
|
|
||||||
<polygon fill="black" stroke="black" points="971.22,-46.52 969.4,-36.09 964.31,-45.38 971.22,-46.52"/>
|
|
||||||
<text text-anchor="middle" x="1033.37" y="-65.3" font-family="Times,serif" font-size="14.00">filter(key, value)</text>
|
|
||||||
</g>
|
|
||||||
<!-- SkippingVersions.Processing -->
|
|
||||||
<g id="node9" class="node">
|
|
||||||
<title>SkippingVersions.Processing</title>
|
|
||||||
<path fill="lightblue" stroke="black" d="M381.37,-36C381.37,-36 189.37,-36 189.37,-36 183.37,-36 177.37,-30 177.37,-24 177.37,-24 177.37,-12 177.37,-12 177.37,-6 183.37,0 189.37,0 189.37,0 381.37,0 381.37,0 387.37,0 393.37,-6 393.37,-12 393.37,-12 393.37,-24 393.37,-24 393.37,-30 387.37,-36 381.37,-36"/>
|
|
||||||
<text text-anchor="middle" x="285.37" y="-14.2" font-family="Times,serif" font-size="16.00">SkippingVersions</text>
|
|
||||||
</g>
|
|
||||||
<!-- SkippingVersions.Idle->SkippingVersions.Processing -->
|
|
||||||
<g id="edge5" class="edge">
|
|
||||||
<title>SkippingVersions.Idle->SkippingVersions.Processing</title>
|
|
||||||
<path fill="none" stroke="black" d="M141.4,-101.91C138.35,-87.58 136.8,-67.37 147.37,-54 151.89,-48.28 161.64,-43.34 173.99,-39.12"/>
|
|
||||||
<polygon fill="black" stroke="black" points="175.39,-42.36 183.89,-36.04 173.3,-35.67 175.39,-42.36"/>
|
|
||||||
<text text-anchor="middle" x="205.37" y="-65.3" font-family="Times,serif" font-size="14.00">filter(key, value)</text>
|
|
||||||
</g>
|
|
||||||
<!-- WaitVersionAfterPHD.Idle -->
|
|
||||||
<g id="node6" class="node">
|
|
||||||
<title>WaitVersionAfterPHD.Idle</title>
|
|
||||||
<path fill="lightgrey" stroke="black" d="M1534.37,-138C1534.37,-138 1342.37,-138 1342.37,-138 1336.37,-138 1330.37,-132 1330.37,-126 1330.37,-126 1330.37,-114 1330.37,-114 1330.37,-108 1336.37,-102 1342.37,-102 1342.37,-102 1534.37,-102 1534.37,-102 1540.37,-102 1546.37,-108 1546.37,-114 1546.37,-114 1546.37,-126 1546.37,-126 1546.37,-132 1540.37,-138 1534.37,-138"/>
|
|
||||||
<text text-anchor="middle" x="1438.37" y="-116.2" font-family="Times,serif" font-size="16.00">WaitVersionAfterPHD</text>
|
|
||||||
</g>
|
|
||||||
<!-- WaitVersionAfterPHD.Processing -->
|
|
||||||
<g id="node10" class="node">
|
|
||||||
<title>WaitVersionAfterPHD.Processing</title>
|
|
||||||
<path fill="lightblue" stroke="black" d="M1534.37,-36C1534.37,-36 1342.37,-36 1342.37,-36 1336.37,-36 1330.37,-30 1330.37,-24 1330.37,-24 1330.37,-12 1330.37,-12 1330.37,-6 1336.37,0 1342.37,0 1342.37,0 1534.37,0 1534.37,0 1540.37,0 1546.37,-6 1546.37,-12 1546.37,-12 1546.37,-24 1546.37,-24 1546.37,-30 1540.37,-36 1534.37,-36"/>
|
|
||||||
<text text-anchor="middle" x="1438.37" y="-14.2" font-family="Times,serif" font-size="16.00">WaitVersionAfterPHD</text>
|
|
||||||
</g>
|
|
||||||
<!-- WaitVersionAfterPHD.Idle->WaitVersionAfterPHD.Processing -->
|
|
||||||
<g id="edge6" class="edge">
|
|
||||||
<title>WaitVersionAfterPHD.Idle->WaitVersionAfterPHD.Processing</title>
|
|
||||||
<path fill="none" stroke="black" d="M1438.37,-101.58C1438.37,-86.38 1438.37,-64.07 1438.37,-46.46"/>
|
|
||||||
<polygon fill="black" stroke="black" points="1441.87,-46.22 1438.37,-36.22 1434.87,-46.22 1441.87,-46.22"/>
|
|
||||||
<text text-anchor="middle" x="1496.37" y="-65.3" font-family="Times,serif" font-size="14.00">filter(key, value)</text>
|
|
||||||
</g>
|
|
||||||
<!-- NotSkippingPrefixNorVersions.Processing->END -->
|
|
||||||
<g id="edge10" class="edge">
|
|
||||||
<title>NotSkippingPrefixNorVersions.Processing->END</title>
|
|
||||||
<path fill="none" stroke="black" d="M649.15,-273.62C611.7,-268.54 578.44,-260.07 566.37,-246 540.33,-215.64 540,-186.08 566.37,-156 586.46,-133.07 673.88,-148.86 702.37,-138 705.22,-136.91 708.06,-135.44 710.76,-133.82"/>
|
|
||||||
<polygon fill="black" stroke="black" points="712.88,-136.61 719.13,-128.05 708.91,-130.84 712.88,-136.61"/>
|
|
||||||
<text text-anchor="middle" x="672.87" y="-212.3" font-family="Times,serif" font-size="14.00">[isListableKey(key, value) and</text>
|
|
||||||
<text text-anchor="middle" x="672.87" y="-197.3" font-family="Times,serif" font-size="14.00">Keys == maxKeys]</text>
|
|
||||||
<text text-anchor="middle" x="672.87" y="-182.3" font-family="Times,serif" font-size="14.00">-> FILTER_END</text>
|
|
||||||
</g>
|
|
||||||
<!-- NotSkippingPrefixNorVersions.Processing->SkippingPrefix.Idle -->
|
|
||||||
<g id="edge9" class="edge">
|
|
||||||
<title>NotSkippingPrefixNorVersions.Processing->SkippingPrefix.Idle</title>
|
|
||||||
<path fill="none" stroke="black" d="M937.6,-274.31C1018.89,-269.01 1106.69,-260.11 1119.37,-246 1143.16,-219.51 1134.03,-175.72 1124.38,-147.62"/>
|
|
||||||
<polygon fill="black" stroke="black" points="1127.6,-146.22 1120.86,-138.04 1121.03,-148.64 1127.6,-146.22"/>
|
|
||||||
<text text-anchor="middle" x="1254.37" y="-212.3" font-family="Times,serif" font-size="14.00">[key.startsWith(<ReplayPrefix>)]</text>
|
|
||||||
<text text-anchor="middle" x="1254.37" y="-197.3" font-family="Times,serif" font-size="14.00">/ prefix <- <ReplayPrefix></text>
|
|
||||||
<text text-anchor="middle" x="1254.37" y="-182.3" font-family="Times,serif" font-size="14.00">-> FILTER_SKIP</text>
|
|
||||||
</g>
|
|
||||||
<!-- NotSkippingPrefixNorVersions.Processing->SkippingPrefix.Idle -->
|
|
||||||
<g id="edge11" class="edge">
|
|
||||||
<title>NotSkippingPrefixNorVersions.Processing->SkippingPrefix.Idle</title>
|
|
||||||
<path fill="none" stroke="black" d="M799.18,-263.65C800.96,-258.05 802.85,-251.79 804.37,-246 814.73,-206.45 793.03,-183.41 823.37,-156 851.23,-130.83 954.1,-142.59 991.37,-138 992.65,-137.84 993.94,-137.68 995.24,-137.52"/>
|
|
||||||
<polygon fill="black" stroke="black" points="995.81,-140.98 1005.29,-136.25 994.93,-134.03 995.81,-140.98"/>
|
|
||||||
<text text-anchor="middle" x="969.37" y="-234.8" font-family="Times,serif" font-size="14.00">[isListableKey(key, value) and</text>
|
|
||||||
<text text-anchor="middle" x="969.37" y="-219.8" font-family="Times,serif" font-size="14.00">nKeys < maxKeys and</text>
|
|
||||||
<text text-anchor="middle" x="969.37" y="-204.8" font-family="Times,serif" font-size="14.00">hasDelimiter(key)]</text>
|
|
||||||
<text text-anchor="middle" x="969.37" y="-189.8" font-family="Times,serif" font-size="14.00">/ prefix <- prefixOf(key)</text>
|
|
||||||
<text text-anchor="middle" x="969.37" y="-174.8" font-family="Times,serif" font-size="14.00">/ CommonPrefixes.append(prefixOf(key))</text>
|
|
||||||
<text text-anchor="middle" x="969.37" y="-159.8" font-family="Times,serif" font-size="14.00">-> FILTER_ACCEPT</text>
|
|
||||||
</g>
|
|
||||||
<!-- NotSkippingPrefixNorVersions.Processing->SkippingVersions.Idle -->
|
|
||||||
<g id="edge7" class="edge">
|
|
||||||
<title>NotSkippingPrefixNorVersions.Processing->SkippingVersions.Idle</title>
|
|
||||||
<path fill="none" stroke="black" d="M649.11,-279.23C439.56,-275.94 73.58,-267.19 53.37,-246 25.76,-217.06 30.6,-188.89 53.37,-156 56.56,-151.39 60.44,-147.39 64.78,-143.91"/>
|
|
||||||
<polygon fill="black" stroke="black" points="66.8,-146.76 73.04,-138.2 62.83,-141 66.8,-146.76"/>
|
|
||||||
<text text-anchor="middle" x="167.87" y="-204.8" font-family="Times,serif" font-size="14.00">[Version.isDeleteMarker(value)]</text>
|
|
||||||
<text text-anchor="middle" x="167.87" y="-189.8" font-family="Times,serif" font-size="14.00">-> FILTER_ACCEPT</text>
|
|
||||||
</g>
|
|
||||||
<!-- NotSkippingPrefixNorVersions.Processing->SkippingVersions.Idle -->
|
|
||||||
<g id="edge12" class="edge">
|
|
||||||
<title>NotSkippingPrefixNorVersions.Processing->SkippingVersions.Idle</title>
|
|
||||||
<path fill="none" stroke="black" d="M649.33,-279.1C514.97,-275.99 331.4,-267.75 305.37,-246 273.69,-219.53 311.53,-185.22 282.37,-156 276.73,-150.36 270.32,-145.59 263.42,-141.56"/>
|
|
||||||
<polygon fill="black" stroke="black" points="264.92,-138.39 254.44,-136.84 261.67,-144.59 264.92,-138.39"/>
|
|
||||||
<text text-anchor="middle" x="411.87" y="-227.3" font-family="Times,serif" font-size="14.00">[isListableKey(key, value) and</text>
|
|
||||||
<text text-anchor="middle" x="411.87" y="-212.3" font-family="Times,serif" font-size="14.00">nKeys < maxKeys and</text>
|
|
||||||
<text text-anchor="middle" x="411.87" y="-197.3" font-family="Times,serif" font-size="14.00">not hasDelimiter(key)]</text>
|
|
||||||
<text text-anchor="middle" x="411.87" y="-182.3" font-family="Times,serif" font-size="14.00">/ Contents.append(key, value)</text>
|
|
||||||
<text text-anchor="middle" x="411.87" y="-167.3" font-family="Times,serif" font-size="14.00">-> FILTER_ACCEPT</text>
|
|
||||||
</g>
|
|
||||||
<!-- NotSkippingPrefixNorVersions.Processing->WaitVersionAfterPHD.Idle -->
|
|
||||||
<g id="edge8" class="edge">
|
|
||||||
<title>NotSkippingPrefixNorVersions.Processing->WaitVersionAfterPHD.Idle</title>
|
|
||||||
<path fill="none" stroke="black" d="M937.38,-280.87C1099.43,-279.42 1344.59,-272.74 1378.37,-246 1411.11,-220.08 1384.48,-192.16 1405.37,-156 1407.38,-152.52 1409.8,-149.11 1412.4,-145.87"/>
|
|
||||||
<polygon fill="black" stroke="black" points="1415.16,-148.04 1419.13,-138.21 1409.9,-143.41 1415.16,-148.04"/>
|
|
||||||
<text text-anchor="middle" x="1486.87" y="-204.8" font-family="Times,serif" font-size="14.00">[Version.isPHD(value)]</text>
|
|
||||||
<text text-anchor="middle" x="1486.87" y="-189.8" font-family="Times,serif" font-size="14.00">-> FILTER_ACCEPT</text>
|
|
||||||
</g>
|
|
||||||
<!-- SkippingPrefix.Processing->SkippingPrefix.Idle -->
|
|
||||||
<g id="edge13" class="edge">
|
|
||||||
<title>SkippingPrefix.Processing->SkippingPrefix.Idle</title>
|
|
||||||
<path fill="none" stroke="black" d="M1064.61,-36.08C1074.44,-40.7 1083.66,-46.57 1091.37,-54 1101.65,-63.92 1107.13,-78.81 1110.04,-91.84"/>
|
|
||||||
<polygon fill="black" stroke="black" points="1106.62,-92.56 1111.88,-101.76 1113.5,-91.29 1106.62,-92.56"/>
|
|
||||||
<text text-anchor="middle" x="1190.37" y="-72.8" font-family="Times,serif" font-size="14.00">[key.startsWith(prefix)]</text>
|
|
||||||
<text text-anchor="middle" x="1190.37" y="-57.8" font-family="Times,serif" font-size="14.00">-> FILTER_SKIP</text>
|
|
||||||
</g>
|
|
||||||
<!-- SkippingPrefix.Processing->NotSkippingPrefixNorVersions.Processing -->
|
|
||||||
<g id="edge14" class="edge">
|
|
||||||
<title>SkippingPrefix.Processing->NotSkippingPrefixNorVersions.Processing</title>
|
|
||||||
<path fill="none" stroke="black" d="M899.82,-36.01C864.18,-48.2 824.54,-68.57 802.37,-102 771.84,-148.02 779.31,-216.26 786.77,-253.8"/>
|
|
||||||
<polygon fill="black" stroke="black" points="783.43,-254.92 788.94,-263.97 790.28,-253.46 783.43,-254.92"/>
|
|
||||||
<text text-anchor="middle" x="899.37" y="-116.3" font-family="Times,serif" font-size="14.00">[not key.startsWith(prefix)]</text>
|
|
||||||
</g>
|
|
||||||
<!-- SkippingVersions.Processing->SkippingVersions.Idle -->
|
|
||||||
<g id="edge15" class="edge">
|
|
||||||
<title>SkippingVersions.Processing->SkippingVersions.Idle</title>
|
|
||||||
<path fill="none" stroke="black" d="M283.88,-36.24C281.71,-50.87 276.4,-71.43 263.37,-84 258.07,-89.11 252.06,-93.48 245.62,-97.21"/>
|
|
||||||
<polygon fill="black" stroke="black" points="243.85,-94.19 236.61,-101.92 247.09,-100.39 243.85,-94.19"/>
|
|
||||||
<text text-anchor="middle" x="349.87" y="-72.8" font-family="Times,serif" font-size="14.00">[isVersionKey(key)]</text>
|
|
||||||
<text text-anchor="middle" x="349.87" y="-57.8" font-family="Times,serif" font-size="14.00">-> FILTER_SKIP</text>
|
|
||||||
</g>
|
|
||||||
<!-- SkippingVersions.Processing->NotSkippingPrefixNorVersions.Processing -->
|
|
||||||
<g id="edge16" class="edge">
|
|
||||||
<title>SkippingVersions.Processing->NotSkippingPrefixNorVersions.Processing</title>
|
|
||||||
<path fill="none" stroke="black" d="M382.46,-36.08C396.72,-40.7 410.82,-46.57 423.37,-54 476.67,-85.57 487.28,-102.42 518.37,-156 539.39,-192.23 514.46,-218.85 546.37,-246 561.72,-259.06 598.56,-267.25 639.23,-272.39"/>
|
|
||||||
<polygon fill="black" stroke="black" points="639.01,-275.89 649.36,-273.59 639.84,-268.93 639.01,-275.89"/>
|
|
||||||
<text text-anchor="middle" x="590.37" y="-116.3" font-family="Times,serif" font-size="14.00">[not isVersionKey(key)]</text>
|
|
||||||
</g>
|
|
||||||
<!-- WaitVersionAfterPHD.Processing->NotSkippingPrefixNorVersions.Processing -->
|
|
||||||
<g id="edge17" class="edge">
|
|
||||||
<title>WaitVersionAfterPHD.Processing->NotSkippingPrefixNorVersions.Processing</title>
|
|
||||||
<path fill="none" stroke="black" d="M1536.41,-36.13C1544.73,-40.79 1552.27,-46.65 1558.37,-54 1585.64,-86.89 1597.89,-215.12 1568.37,-246 1547.29,-268.05 1167.71,-276.42 947.74,-279.43"/>
|
|
||||||
<polygon fill="black" stroke="black" points="947.67,-275.93 937.71,-279.57 947.76,-282.93 947.67,-275.93"/>
|
|
||||||
<text text-anchor="middle" x="1758.37" y="-123.8" font-family="Times,serif" font-size="14.00">[isVersionKey(key) and master(key) == PHDkey]</text>
|
|
||||||
<text text-anchor="middle" x="1758.37" y="-108.8" font-family="Times,serif" font-size="14.00">/ key <- master(key)</text>
|
|
||||||
</g>
|
|
||||||
<!-- WaitVersionAfterPHD.Processing->NotSkippingPrefixNorVersions.Processing -->
|
|
||||||
<g id="edge18" class="edge">
|
|
||||||
<title>WaitVersionAfterPHD.Processing->NotSkippingPrefixNorVersions.Processing</title>
|
|
||||||
<path fill="none" stroke="black" d="M1546.51,-21.25C1677.94,-26.54 1888.29,-44.09 1937.37,-102 1947.71,-114.21 1946.85,-125.11 1937.37,-138 1841.62,-268.08 1749.48,-218.23 1590.37,-246 1471.26,-266.79 1143.92,-275.5 947.77,-278.94"/>
|
|
||||||
<polygon fill="black" stroke="black" points="947.6,-275.44 937.66,-279.11 947.72,-282.44 947.6,-275.44"/>
|
|
||||||
<text text-anchor="middle" x="2124.87" y="-116.3" font-family="Times,serif" font-size="14.00">[not isVersionKey(key) or master(key) != PHDkey]</text>
|
|
||||||
</g>
|
|
||||||
</g>
|
|
||||||
</svg>
|
|
Before Width: | Height: | Size: 18 KiB |
|
@ -1,35 +0,0 @@
digraph {
node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
edge [fontsize=14];
rankdir=TB;

START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]

node [fillcolor="lightgrey"];
"NotSkipping.Idle" [label="NotSkipping",group="NotSkipping"];
"NeverSkipping.Idle" [label="NeverSkipping",group="NeverSkipping"];
"NotSkippingPrefix.Idle" [label="NotSkippingPrefix",group="NotSkippingPrefix"];
"SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];

node [fillcolor="lightblue"];
"NeverSkipping.Processing" [label="NeverSkipping",group="NeverSkipping"];
"NotSkippingPrefix.Processing" [label="NotSkippingPrefix",group="NotSkippingPrefix"];
"SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];

START -> "NotSkipping.Idle"
"NotSkipping.Idle" -> "NeverSkipping.Idle" [label="[delimiter == undefined]"]
"NotSkipping.Idle" -> "NotSkippingPrefix.Idle" [label="[delimiter == '/']"]

"NeverSkipping.Idle" -> "NeverSkipping.Processing" [label="filter(key, value)"]
"NotSkippingPrefix.Idle" -> "NotSkippingPrefix.Processing" [label="filter(key, value)"]
"SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]

"NeverSkipping.Processing" -> END [label="[nKeys == maxKeys]\n-> FILTER_END"]
"NeverSkipping.Processing" -> "NeverSkipping.Idle" [label="[nKeys < maxKeys]\n/ Contents.append(key, value)\n-> FILTER_ACCEPT"]
"NotSkippingPrefix.Processing" -> END [label="[nKeys == maxKeys]\n-> FILTER_END"]
"NotSkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[nKeys < maxKeys and hasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
"NotSkippingPrefix.Processing" -> "NotSkippingPrefix.Idle" [label="[nKeys < maxKeys and not hasDelimiter(key)]\n/ Contents.append(key, value)\n-> FILTER_ACCEPT"]
"SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
"SkippingPrefix.Processing" -> "NotSkippingPrefix.Processing" [label="[not key.startsWith(prefix)]"]
}
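
This Delimiter machine is the simplest of the three and maps directly onto a filter() method. The TypeScript below is a minimal sketch under stated assumptions: the FILTER_* values, state names, and helpers (prefixOf, hasDelimiter) are taken from the diagram labels, not from the repository's actual implementation.

    const FILTER_ACCEPT = 1;
    const FILTER_SKIP = 0;
    const FILTER_END = -1;

    class DelimiterSketch {
        private state: 'NeverSkipping' | 'NotSkippingPrefix' | 'SkippingPrefix';
        private prefix = '';
        private nKeys = 0;
        readonly Contents: { key: string; value: string }[] = [];
        readonly CommonPrefixes: string[] = [];

        constructor(private delimiter: string | undefined,
                    private maxKeys: number) {
            // START -> NotSkipping: branch once on the delimiter parameter.
            this.state = delimiter === undefined
                ? 'NeverSkipping' : 'NotSkippingPrefix';
        }

        filter(key: string, value: string): number {
            if (this.state === 'SkippingPrefix') {
                if (key.startsWith(this.prefix)) {
                    return FILTER_SKIP; // still under the folded prefix
                }
                this.state = 'NotSkippingPrefix'; // re-process this key below
            }
            if (this.nKeys === this.maxKeys) {
                return FILTER_END; // listing is full
            }
            const d = this.delimiter;
            if (this.state === 'NotSkippingPrefix'
                && d !== undefined && key.includes(d)) {
                // hasDelimiter(key): fold the key into a CommonPrefix,
                // then skip everything sharing that prefix.
                this.prefix = key.slice(0, key.indexOf(d) + d.length);
                this.CommonPrefixes.push(this.prefix);
                this.nKeys += 1;
                this.state = 'SkippingPrefix';
                return FILTER_ACCEPT;
            }
            this.Contents.push({ key, value });
            this.nKeys += 1;
            return FILTER_ACCEPT;
        }
    }

One design point worth noting from the diagram: SkippingPrefix.Processing hands a non-matching key back to NotSkippingPrefix.Processing rather than to an Idle state, which is why the sketch falls through and re-evaluates the same key after leaving SkippingPrefix.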
@ -1,166 +0,0 @@
[Deleted file: graphviz 2.43.0-generated SVG (975×533 pt, 12 KiB) rendering the preceding state diagram; generated markup omitted.]
@ -1,50 +0,0 @@
digraph {
node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
edge [fontsize=14];
rankdir=TB;

START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]

node [fillcolor="lightgrey"];
"NotSkipping.Idle" [label="NotSkipping",group="NotSkipping",width=4];
"SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];
"WaitForNullKey.Idle" [label="WaitForNullKey",group="WaitForNullKey"];
"SkippingVersions.Idle" [label="SkippingVersions",group="SkippingVersions"];

node [fillcolor="lightblue"];
"NotSkipping.Processing" [label="NotSkipping",group="NotSkipping",width=4];
"NotSkippingV0.Processing" [label="NotSkippingV0",group="NotSkipping",width=4];
"NotSkippingV1.Processing" [label="NotSkippingV1",group="NotSkipping",width=4];
"NotSkippingCommon.Processing" [label="NotSkippingCommon",group="NotSkipping",width=4];
"SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];
"WaitForNullKey.Processing" [label="WaitForNullKey",group="WaitForNullKey"];
"SkippingVersions.Processing" [label="SkippingVersions",group="SkippingVersions"];

START -> "WaitForNullKey.Idle" [label="[versionIdMarker != undefined]"]
START -> "NotSkipping.Idle" [label="[versionIdMarker == undefined]"]

"NotSkipping.Idle" -> "NotSkipping.Processing" [label="filter(key, value)"]
"SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]
"WaitForNullKey.Idle" -> "WaitForNullKey.Processing" [label="filter(key, value)"]
"SkippingVersions.Idle" -> "SkippingVersions.Processing" [label="filter(key, value)"]

"NotSkipping.Processing" -> "NotSkippingV0.Processing" [label="vFormat='v0'"]
"NotSkipping.Processing" -> "NotSkippingV1.Processing" [label="vFormat='v1'"]

"WaitForNullKey.Processing" -> "NotSkipping.Processing" [label="master(key) != keyMarker"]
"WaitForNullKey.Processing" -> "SkippingVersions.Processing" [label="master(key) == keyMarker"]
"NotSkippingV0.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(<ReplayPrefix>)]\n/ prefix <- <ReplayPrefix>\n-> FILTER_SKIP"]
"NotSkippingV0.Processing" -> "NotSkipping.Idle" [label="[Version.isPHD(value)]\n-> FILTER_ACCEPT"]
"NotSkippingV0.Processing" -> "NotSkippingCommon.Processing" [label="[not key.startsWith(<ReplayPrefix>)\nand not Version.isPHD(value)]"]
"NotSkippingV1.Processing" -> "NotSkippingCommon.Processing" [label="[always]"]
"NotSkippingCommon.Processing" -> END [label="[isListableKey(key, value) and\nnKeys == maxKeys]\n-> FILTER_END"]
"NotSkippingCommon.Processing" -> "SkippingPrefix.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nhasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
"NotSkippingCommon.Processing" -> "NotSkipping.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nnot hasDelimiter(key)]\n/ Contents.append(key, versionId, value)\n-> FILTER_ACCEPT"]

"SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
"SkippingPrefix.Processing" -> "NotSkipping.Processing" [label="[not key.startsWith(prefix)]"]
"SkippingVersions.Processing" -> "NotSkipping.Processing" [label="master(key) != keyMarker or\nversionId > versionIdMarker"]
"SkippingVersions.Processing" -> "SkippingVersions.Idle" [label="master(key) == keyMarker and\nversionId < versionIdMarker\n-> FILTER_SKIP"]
"SkippingVersions.Processing" -> "SkippingVersions.Idle" [label="master(key) == keyMarker and\nversionId == versionIdMarker\n-> FILTER_ACCEPT"]
}
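
The continuation logic around keyMarker and versionIdMarker is the subtle part of this machine. Below is a small sketch of the SkippingVersions guard trio above; the plain string comparison of version IDs is an assumption (the real key encoding must give version IDs an order consistent with the listing), and master() restates the assumed helper from the earlier v0 sketch:

    // Verdicts mirror the three SkippingVersions.Processing edges above.
    const VID_SEP = '\0'; // assumed separator, as in the earlier sketch

    function master(key: string): string {
        const i = key.indexOf(VID_SEP);
        return i === -1 ? key : key.slice(0, i);
    }

    type Verdict = 'resume' | 'skip' | 'accept';

    function skippingVersions(key: string, versionId: string,
                              keyMarker: string,
                              versionIdMarker: string): Verdict {
        if (master(key) !== keyMarker || versionId > versionIdMarker) {
            return 'resume'; // re-enter NotSkipping.Processing with this key
        }
        // Still on the marker's master key, at or before the marker version.
        return versionId === versionIdMarker ? 'accept' : 'skip';
    }

Per the diagram, 'skip' maps to FILTER_SKIP and 'accept' to FILTER_ACCEPT (both returning to SkippingVersions.Idle), while 'resume' is the transition back into NotSkipping.Processing.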
@ -1,265 +0,0 @@
[Deleted file: graphviz 2.43.0-generated SVG (1522×922 pt) rendering the preceding state diagram; generated markup omitted.]
|
||||||
<title>WaitForNullKey.Processing->SkippingVersions.Processing</title>
|
|
||||||
<path fill="none" stroke="black" d="M704.4,-726.26C797.32,-711.87 931.09,-691.16 1026.87,-676.33"/>
|
|
||||||
<polygon fill="black" stroke="black" points="1027.55,-679.77 1036.89,-674.78 1026.47,-672.85 1027.55,-679.77"/>
|
|
||||||
<text text-anchor="middle" x="1001.26" y="-696.8" font-family="Times,serif" font-size="14.00">master(key) == keyMarker</text>
|
|
||||||
</g>
|
|
||||||
<!-- SkippingVersions.Processing->SkippingVersions.Idle -->
|
|
||||||
<g id="edge21" class="edge">
|
|
||||||
<title>SkippingVersions.Processing->SkippingVersions.Idle</title>
|
|
||||||
<path fill="none" stroke="black" d="M1241.89,-638.98C1249.74,-634.29 1256.75,-628.4 1262.26,-621 1274.21,-604.96 1274.21,-592.04 1262.26,-576 1258.82,-571.38 1254.79,-567.34 1250.33,-563.82"/>
|
|
||||||
<polygon fill="black" stroke="black" points="1252.11,-560.8 1241.89,-558.02 1248.15,-566.57 1252.11,-560.8"/>
|
|
||||||
<text text-anchor="middle" x="1392.26" y="-609.8" font-family="Times,serif" font-size="14.00">master(key) === keyMarker and </text>
|
|
||||||
<text text-anchor="middle" x="1392.26" y="-594.8" font-family="Times,serif" font-size="14.00">versionId < versionIdMarker</text>
|
|
||||||
<text text-anchor="middle" x="1392.26" y="-579.8" font-family="Times,serif" font-size="14.00">-> FILTER_SKIP</text>
|
|
||||||
</g>
|
|
||||||
<!-- SkippingVersions.Processing->SkippingVersions.Idle -->
|
|
||||||
<g id="edge22" class="edge">
|
|
||||||
<title>SkippingVersions.Processing->SkippingVersions.Idle</title>
|
|
||||||
<path fill="none" stroke="black" d="M1036.97,-654.38C978.97,-650.96 915.73,-642.25 897.26,-621 884.15,-605.9 884.15,-591.1 897.26,-576 914.65,-555.99 971.71,-547.1 1026.73,-543.28"/>
|
|
||||||
<polygon fill="black" stroke="black" points="1027.21,-546.76 1036.97,-542.62 1026.76,-539.77 1027.21,-546.76"/>
|
|
||||||
<text text-anchor="middle" x="1019.26" y="-609.8" font-family="Times,serif" font-size="14.00">master(key) === keyMarker and </text>
|
|
||||||
<text text-anchor="middle" x="1019.26" y="-594.8" font-family="Times,serif" font-size="14.00">versionId == versionIdMarker</text>
|
|
||||||
<text text-anchor="middle" x="1019.26" y="-579.8" font-family="Times,serif" font-size="14.00">-> FILTER_ACCEPT</text>
|
|
||||||
</g>
|
|
||||||
<!-- SkippingVersions.Processing->NotSkipping.Processing -->
|
|
||||||
<g id="edge20" class="edge">
|
|
||||||
<title>SkippingVersions.Processing->NotSkipping.Processing</title>
|
|
||||||
<path fill="none" stroke="black" d="M1037.02,-651.24C897.84,-644.67 672.13,-632.37 657.26,-621 641.04,-608.6 634.18,-586.13 631.3,-568.16"/>
|
|
||||||
<polygon fill="black" stroke="black" points="634.76,-567.68 630.02,-558.21 627.82,-568.57 634.76,-567.68"/>
|
|
||||||
<text text-anchor="middle" x="770.26" y="-602.3" font-family="Times,serif" font-size="14.00">master(key) !== keyMarker or </text>
|
|
||||||
<text text-anchor="middle" x="770.26" y="-587.3" font-family="Times,serif" font-size="14.00">versionId > versionIdMarker</text>
|
|
||||||
</g>
|
|
||||||
</g>
|
|
||||||
</svg>
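The diagram encodes the listing filter as a state machine: each bracketed guard selects a transition, `/`-prefixed lines are actions, and `->` gives the filter's return value (FILTER_ACCEPT, FILTER_SKIP, or FILTER_END). A minimal TypeScript sketch of just the SkippingPrefix edges shown above; the state and constant names come from the diagram, everything else is invented for illustration:

```ts
// Illustrative reduction of the SkippingPrefix edges in the diagram above;
// not the real listing implementation.
const FILTER_SKIP = 0;
const FILTER_ACCEPT = 1;

enum State { NotSkipping, SkippingPrefix }

let state = State.SkippingPrefix;
const prefix = 'photos/'; // set by the "/ prefix <- prefixOf(key)" action

function filter(key: string): number {
    if (state === State.SkippingPrefix) {
        if (key.startsWith(prefix)) {
            // [key.startsWith(prefix)] -> FILTER_SKIP
            return FILTER_SKIP;
        }
        // [not key.startsWith(prefix)]: resume normal processing
        state = State.NotSkipping;
    }
    // NotSkipping handling (delimiter grouping, nKeys bookkeeping) omitted
    return FILTER_ACCEPT;
}
```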
@ -0,0 +1,741 @@
<!DOCTYPE html>
<html>
<head>
  <meta charset="utf-8">
  <base data-ice="baseUrl" href="../../">
  <title data-ice="title">kinetic/Kinetic.js | API Document</title>
</head>
<body class="layout-container" data-ice="rootContainer">
<div class="content" data-ice="content"><h1 data-ice="title">kinetic/Kinetic.js</h1>
<pre class="source-code line-number raw-source-code"><code class="prettyprint linenums" data-ice="content">import protobuf from 'protobufjs';
|
||||||
|
import crypto from 'crypto';
|
||||||
|
|
||||||
|
const VERSION = 0x46;
|
||||||
|
const protoFilePath = __dirname + '/kinetic.proto';
|
||||||
|
const buildName = 'com.seagate.kinetic.proto';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Represents the Kinetic Protocol Data Structure.
|
||||||
|
* @constructor
|
||||||
|
*/
|
||||||
|
class Kinetic {
|
||||||
|
constructor() {
|
||||||
|
this._version = VERSION;
|
||||||
|
this.logs = {
|
||||||
|
UTILIZATIONS: 0,
|
||||||
|
TEMPERATURES: 1,
|
||||||
|
CAPACITIES: 2,
|
||||||
|
CONFIGURATION: 3,
|
||||||
|
STATISTICS: 4,
|
||||||
|
MESSAGES: 5,
|
||||||
|
LIMITS: 6,
|
||||||
|
DEVICE: 7,
|
||||||
|
};
|
||||||
|
this.op = {
|
||||||
|
PUT: 4,
|
||||||
|
PUT_RESPONSE: 3,
|
||||||
|
GET: 2,
|
||||||
|
GET_RESPONSE: 1,
|
||||||
|
NOOP: 30,
|
||||||
|
NOOP_RESPONSE: 29,
|
||||||
|
DELETE: 6,
|
||||||
|
DELETE_RESPONSE: 5,
|
||||||
|
SET_CLUSTER_VERSION: 22,
|
||||||
|
SETUP_RESPONSE: 21,
|
||||||
|
FLUSH: 32,
|
||||||
|
FLUSH_RESPONSE: 31,
|
||||||
|
GETLOG: 24,
|
||||||
|
GETLOG_RESPONSE: 23,
|
||||||
|
};
|
||||||
|
this.errors = {
|
||||||
|
INVALID_STATUS_CODE: -1,
|
||||||
|
NOT_ATTEMPTED: 0,
|
||||||
|
SUCCESS: 1,
|
||||||
|
HMAC_FAILURE: 2,
|
||||||
|
NOT_AUTHORIZED: 3,
|
||||||
|
VERSION_FAILURE: 4,
|
||||||
|
INTERNAL_ERROR: 5,
|
||||||
|
HEADER_REQUIRED: 6,
|
||||||
|
NOT_FOUND: 7,
|
||||||
|
VERSION_MISMATCH: 8,
|
||||||
|
SERVICE_BUSY: 9,
|
||||||
|
EXPIRED: 10,
|
||||||
|
DATA_ERROR: 11,
|
||||||
|
PERM_DATA_ERROR: 12,
|
||||||
|
REMOTE_CONNECTION_ERROR: 13,
|
||||||
|
NO_SPACE: 14,
|
||||||
|
NO_SUCH_HMAC_ALGORITHM: 15,
|
||||||
|
INVALID_REQUEST: 16,
|
||||||
|
NESTED_OPERATION_ERRORS: 17,
|
||||||
|
DEVICE_LOCKED: 18,
|
||||||
|
DEVICE_ALREADY_UNLOCKED: 19,
|
||||||
|
CONNECTION_TERMINATED: 20,
|
||||||
|
INVALID_BATCH: 21,
|
||||||
|
};
|
||||||
|
this.build = protobuf.loadProtoFile(protoFilePath).build(buildName);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Slice the buffer with the offset and the limit.
|
||||||
|
* @param {Object} obj - an object buffer with offset and limit.
|
||||||
|
* @returns {Buffer} sliced buffer from the buffer structure with the offset
|
||||||
|
* and the limit.
|
||||||
|
*/
|
||||||
|
getSlice(obj) {
|
||||||
|
return obj.buffer.slice(obj.offset, obj.limit);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sets the actual protobuf message for the Kinetic Protocol Data Unit.
|
||||||
|
* @param {Object} pbMessage - the well formated kinetic protobuf structure.
|
||||||
|
* @returns {Kinetic} to allow for a functional style.
|
||||||
|
*/
|
||||||
|
setProtobuf(pbMessage) {
|
||||||
|
this._message = pbMessage;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sets the chunk for the Kinetic Protocol Data Unit.
|
||||||
|
* @param {Buffer} chunk.
|
||||||
|
* @returns {Kinetic} to allow for a functional style.
|
||||||
|
*/
|
||||||
|
setChunk(chunk) {
|
||||||
|
this._chunk = chunk;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sets the general protobuf message for the Kinetic Protocol Data Unit.
|
||||||
|
* @param {Object} command - the well formated general kinetic protobuf
|
||||||
|
* structure.
|
||||||
|
* @returns {Kinetic} setting the protobuf message.
|
||||||
|
*/
|
||||||
|
setCommand(command) {
|
||||||
|
const message = new this.build.Command(command);
|
||||||
|
return this.setProtobuf(message);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sets the HMAC for the Kinetic Protocol Data Unit integrity.
|
||||||
|
* @param {Buffer} secret - the shared secret.
|
||||||
|
* @returns {Kinetic} to allow for a functional style.
|
||||||
|
*/
|
||||||
|
setHMAC() {
|
||||||
|
this._hmac = crypto.createHmac('sha1', 'asdfasdf')
|
||||||
|
.update(this.getProtobuf().toBuffer()).digest();
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the actual version of the kinetic protocol.
|
||||||
|
* @returns {Number} the current version of the kinetic protocol.
|
||||||
|
*/
|
||||||
|
getVersion() {
|
||||||
|
return this._version;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the actual protobuf message.
|
||||||
|
* @returns {Object} Kinetic protobuf message.
|
||||||
|
*/
|
||||||
|
getProtobuf() {
|
||||||
|
return this._message;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the actual protobuf message size.
|
||||||
|
* @returns {Number} Size of the kinetic protobuf message.
|
||||||
|
*/
|
||||||
|
getProtobufSize() {
|
||||||
|
return this.getProtobuf().calculate();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the actual chunk.
|
||||||
|
* @returns {Buffer} Chunk.
|
||||||
|
*/
|
||||||
|
getChunk() {
|
||||||
|
return this._chunk;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the actual chunk size.
|
||||||
|
* @returns {Number} Chunk size.
|
||||||
|
*/
|
||||||
|
getChunkSize() {
|
||||||
|
return this._chunk.length;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the general build template.
|
||||||
|
* @returns {Object} General kinetic protobuf structure.
|
||||||
|
*/
|
||||||
|
getCommand() {
|
||||||
|
return this.build.Command;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the actual HMAC.
|
||||||
|
* @returns {Buffer} HMAC.
|
||||||
|
*/
|
||||||
|
getHMAC() {
|
||||||
|
return this._hmac;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the actual request messageType.
|
||||||
|
* @returns {Number} The code number of the request.
|
||||||
|
*/
|
||||||
|
getMessageType() {
|
||||||
|
return this.getProtobuf().header.messageType;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the actual key.
|
||||||
|
* @returns {Buffer} Key.
|
||||||
|
*/
|
||||||
|
getKey() {
|
||||||
|
return this.getSlice(this.getProtobuf().body.keyValue.key);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the version of the data unit in the database.
|
||||||
|
* @returns {Buffer} Version of the data unit in the database.
|
||||||
|
*/
|
||||||
|
getDbVersion() {
|
||||||
|
return this.getSlice(this.getProtobuf().body.keyValue.dbVersion);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the new version of the data unit.
|
||||||
|
* @returns {Buffer} New version of the data unit.
|
||||||
|
*/
|
||||||
|
getNewVersion() {
|
||||||
|
return this.getSlice(this.getProtobuf().body.keyValue.newVersion);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the detailed error message.
|
||||||
|
* @returns {Buffer} Detailed error message.
|
||||||
|
*/
|
||||||
|
getErrorMessage() {
|
||||||
|
return this.getSlice(this.getProtobuf().status.detailedMessage);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the logs message.
|
||||||
|
* @returns {Buffer} Logs message.
|
||||||
|
*/
|
||||||
|
getGetLogMessage() {
|
||||||
|
return this.getSlice(this.getProtobuf().body.getLog.messages);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the operartion name with it code.
|
||||||
|
* @param {Number} opCode - the operation code.
|
||||||
|
* @returns {String} operation name.
|
||||||
|
*/
|
||||||
|
getOp(opCode) {
|
||||||
|
return this.getKeyByValue(this.op, opCode);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the error name with it code.
|
||||||
|
* @param {Number} errorCode - the error code.
|
||||||
|
* @returns {String} error name.
|
||||||
|
*/
|
||||||
|
getError(errorCode) {
|
||||||
|
return this.getKeyByValue(this.errors, errorCode);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the log type name with it code.
|
||||||
|
* @param {Number} logCode - the log type code.
|
||||||
|
* @returns {String} log type name.
|
||||||
|
*/
|
||||||
|
getLogType(logCode) {
|
||||||
|
return this.getKeyByValue(this.logs, logCode);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gets the key of an object with it value.
|
||||||
|
* @param {Object} object - the corresponding object.
|
||||||
|
* @param {value} value - the corresponding value.
|
||||||
|
* @returns {Buffer} object key.
|
||||||
|
*/
|
||||||
|
getKeyByValue(object, value) {
|
||||||
|
return Object.keys(object).find(key => object[key] === value);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compare two buffers.
|
||||||
|
* @param {Buffer} buf0/buf1 - the buffers to compare.
|
||||||
|
* @returns {Boolean} false if it's different true if not.
|
||||||
|
*/
|
||||||
|
diff(buf0, buf1) {
|
||||||
|
if (buf0.length !== buf1.length) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
for (let i = 0; i <= buf0.length; i++) {
|
||||||
|
if (buf0[i] !== buf1[i])
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test the HMAC integrity between the actual instance and the given HMAC.
|
||||||
|
* @param {Buffer} hmac - the non instance hmac to compare.
|
||||||
|
* @returns {Boolean} true if the HMACs are the same.
|
||||||
|
* @returns an error if they are different.
|
||||||
|
*/
|
||||||
|
hmacIntegrity(hmac) {
|
||||||
|
if (hmac === undefined || this.getHMAC() === undefined)
|
||||||
|
return this.errors.HMAC_FAILURE;
|
||||||
|
if (this.diff(hmac, this.getHMAC()) === false)
|
||||||
|
return this.errors.HMAC_FAILURE;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Getting logs and stats request following the kinetic protocol.
|
||||||
|
* @param {number} incrementTCP - monotonically increasing number for each
|
||||||
|
* request in a TCP connection.
|
||||||
|
* @param {Array} types - array filled by logs types needed.
|
||||||
|
* @param {number} clusterVersion - version of the cluster
|
||||||
|
* @returns {Kinetic} this - message structure following the kinetic
|
||||||
|
* protocol
|
||||||
|
*/
|
||||||
|
getLog(incrementTCP, types, clusterVersion) {
|
||||||
|
const identity = (new Date).getTime();
|
||||||
|
return this.setCommand({
|
||||||
|
"header": {
|
||||||
|
"messageType": "GETLOG",
|
||||||
|
"connectionID": identity,
|
||||||
|
"sequence": incrementTCP,
|
||||||
|
"clusterVersion": clusterVersion,
|
||||||
|
},
|
||||||
|
"body": {
|
||||||
|
"getLog": {
|
||||||
|
"types": types,
|
||||||
|
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Getting logs and stats response following the kinetic protocol.
|
||||||
|
* @param {String or number} response - response code (SUCCESS, FAIL)
|
||||||
|
* @param {String or Buffer} errorMessage - detailed error message.
|
||||||
|
* @param {object} responseLogs - object filled by logs needed.
|
||||||
|
* @returns {Kinetic} this - message structure following the kinetic
|
||||||
|
* protocol
|
||||||
|
*/
|
||||||
|
getLogResponse(response, errorMessage, responseLogs) {
|
||||||
|
return this.setCommand({
|
||||||
|
"header": {
|
||||||
|
"ackSequence": this.getProtobuf().header.sequence,
|
||||||
|
"messageType": "GETLOG_RESPONSE",
|
||||||
|
},
|
||||||
|
"body": {
|
||||||
|
"getLog": responseLogs,
|
||||||
|
},
|
||||||
|
"status": {
|
||||||
|
"code": response,
|
||||||
|
"detailedMessage": errorMessage,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Flush all data request following the kinetic protocol.
|
||||||
|
* @param {number} incrementTCP - monotonically increasing number for each
|
||||||
|
* request in a TCP connection.
|
||||||
|
* @param {number} clusterVersion - version of the cluster
|
||||||
|
* @returns {Kinetic} this - message structure following the kinetic
|
||||||
|
* protocol
|
||||||
|
*/
|
||||||
|
flush(incrementTCP, clusterVersion) {
|
||||||
|
const identity = (new Date).getTime();
|
||||||
|
return this.setCommand({
|
||||||
|
"header": {
|
||||||
|
"messageType": "FLUSHALLDATA",
|
||||||
|
"connectionID": identity,
|
||||||
|
"sequence": incrementTCP,
|
||||||
|
"clusterVersion": clusterVersion,
|
||||||
|
},
|
||||||
|
"body": { },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Flush all data response following the kinetic protocol.
|
||||||
|
* @param {String or number} response - response code (SUCCESS, FAIL)
|
||||||
|
* @param {String or Buffer} errorMessage - detailed error message.
|
||||||
|
* @returns {Kinetic} this - message structure following the kinetic
|
||||||
|
* protocol
|
||||||
|
*/
|
||||||
|
flushResponse(response, errorMessage) {
|
||||||
|
return this.setCommand({
|
||||||
|
"header": {
|
||||||
|
"messageType": "FLUSHALLDATA_RESPONSE",
|
||||||
|
"ackSequence": this.getProtobuf().header.sequence,
|
||||||
|
},
|
||||||
|
"status": {
|
||||||
|
"code": response,
|
||||||
|
"detailedMessage": errorMessage,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* set clusterVersion request following the kinetic protocol.
|
||||||
|
* @param {number} incrementTCP - monotonically increasing number for each
|
||||||
|
* request in a TCP connection.
|
||||||
|
* @param {number} clusterVersion - The version number of this cluster
|
||||||
|
* definition
|
||||||
|
* @param {number} oldClusterVersion - The old version number of this
|
||||||
|
* cluster definition
|
||||||
|
* @returns {Kinetic} this - message structure following the kinetic
|
||||||
|
* protocol
|
||||||
|
*/
|
||||||
|
setClusterVersion(incrementTCP, clusterVersion, oldClusterVersion) {
|
||||||
|
const identity = (new Date).getTime();
|
||||||
|
return this.setCommand({
|
||||||
|
"header": {
|
||||||
|
"messageType": "SETUP",
|
||||||
|
"connectionID": identity,
|
||||||
|
"sequence": incrementTCP,
|
||||||
|
"clusterVersion": oldClusterVersion,
|
||||||
|
},
|
||||||
|
"body": {
|
||||||
|
"setup": {
|
||||||
|
"newClusterVersion": clusterVersion,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Setup response request following the kinetic protocol.
|
||||||
|
* @param {String or number} response - response code (SUCCESS, FAIL)
|
||||||
|
* @param {String or Buffer} errorMessage - detailed error message.
|
||||||
|
* @returns {Kinetic} this - message structure following the kinetic
|
||||||
|
* protocol
|
||||||
|
*/
|
||||||
|
setupResponse(response, errorMessage) {
|
||||||
|
return this.setCommand({
|
||||||
|
"header": {
|
||||||
|
"messageType": "SETUP_RESPONSE",
|
||||||
|
"ackSequence": this.getProtobuf().header.sequence,
|
||||||
|
},
|
||||||
|
"status": {
|
||||||
|
"code": response,
|
||||||
|
"detailedMessage": errorMessage,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* NOOP request following the kinetic protocol.
|
||||||
|
* @param {number} incrementTCP - monotonically increasing number for each
|
||||||
|
* request in a TCP connection
|
||||||
|
* @param {number} clusterVersion - The version number of this cluster
|
||||||
|
* definition
|
||||||
|
* @returns {Kinetic} this - message structure following the kinetic
|
||||||
|
* protocol
|
||||||
|
*/
|
||||||
|
noOp(incrementTCP, clusterVersion) {
|
||||||
|
const identity = (new Date).getTime();
|
||||||
|
return this.setCommand({
|
||||||
|
"header": {
|
||||||
|
"messageType": "NOOP",
|
||||||
|
"connectionID": identity,
|
||||||
|
"sequence": incrementTCP,
|
||||||
|
"clusterVersion": clusterVersion,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Response for the NOOP request following the kinetic protocol.
|
||||||
|
* @param {String or number} response - response code (SUCCESS, FAIL)
|
||||||
|
* @param {String or Buffer} errorMessage - detailed error message.
|
||||||
|
* @returns {Kinetic} this - message structure following the kinetic
|
||||||
|
* protocol
|
||||||
|
*/
|
||||||
|
noOpResponse(response, errorMessage) {
|
||||||
|
return this.setCommand({
|
||||||
|
"header": {
|
||||||
|
"messageType": "NOOP_RESPONSE",
|
||||||
|
"ackSequence": this.getProtobuf().header.sequence,
|
||||||
|
},
|
||||||
|
"status": {
|
||||||
|
"code": response,
|
||||||
|
"detailedMessage": errorMessage,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* PUT request following the kinetic protocol.
|
||||||
|
* @param {String or Buffer} key - key of the item to put.
|
||||||
|
* @param {number} incrementTCP - monotonically increasing number for each
|
||||||
|
* request in a TCP connection
|
||||||
|
* @param {String or Buffer} dbVersion - version of the item in the
|
||||||
|
* database.
|
||||||
|
* @param {String or Buffer} newVersion - new version of the item to put.
|
||||||
|
* @param {number} clusterVersion - The version number of this cluster
|
||||||
|
* definition
|
||||||
|
* @returns {Kinetic} this - message structure following the kinetic
|
||||||
|
* protocol
|
||||||
|
*/
|
||||||
|
put(key, incrementTCP, dbVersion, newVersion, clusterVersion) {
|
||||||
|
const identity = (new Date).getTime();
|
||||||
|
return this.setCommand({
|
||||||
|
"header": {
|
||||||
|
"messageType": "PUT",
|
||||||
|
"connectionID": identity,
|
||||||
|
"sequence": incrementTCP,
|
||||||
|
"clusterVersion": clusterVersion,
|
||||||
|
},
|
||||||
|
"body": {
|
||||||
|
"keyValue": {
|
||||||
|
"key": key,
|
||||||
|
"newVersion": newVersion,
|
||||||
|
"dbVersion": dbVersion,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Response for the PUT request following the kinetic protocol.
|
||||||
|
* @param {String or number} response - response code (SUCCESS, FAIL)
|
||||||
|
* @param {String or Buffer} errorMessage - detailed error message.
|
||||||
|
* @returns {Kinetic} this - message structure following the kinetic
|
||||||
|
* protocol
|
||||||
|
*/
|
||||||
|
putResponse(response, errorMessage) {
|
||||||
|
return this.setCommand({
|
||||||
|
"header": {
|
||||||
|
"messageType": "PUT_RESPONSE",
|
||||||
|
"ackSequence": this.getProtobuf().header.sequence,
|
||||||
|
},
|
||||||
|
"body": {
|
||||||
|
"keyValue": { },
|
||||||
|
},
|
||||||
|
"status": {
|
||||||
|
"code": response,
|
||||||
|
"detailedMessage": errorMessage,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* GET request following the kinetic protocol.
|
||||||
|
* @param {String or Buffer} key - key of the item to put.
|
||||||
|
* @param {number} incrementTCP - monotonically increasing number for each
|
||||||
|
* request in a TCP connection
|
||||||
|
* @param {number} clusterVersion - The version number of this cluster
|
||||||
|
* definition
|
||||||
|
* @returns {Kinetic} this - message structure following the kinetic
|
||||||
|
* protocol
|
||||||
|
*/
|
||||||
|
get(key, incrementTCP, clusterVersion) {
|
||||||
|
const identity = (new Date).getTime();
|
||||||
|
return this.setCommand({
|
||||||
|
"header": {
|
||||||
|
"messageType": "GET",
|
||||||
|
"connectionID": identity,
|
||||||
|
"sequence": incrementTCP,
|
||||||
|
"clusterVersion": clusterVersion,
|
||||||
|
},
|
||||||
|
"body": {
|
||||||
|
"keyValue": {
|
||||||
|
"key": key,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Response for the GET request following the kinetic protocol.
|
||||||
|
* @param {String or number} response - response code (SUCCESS, FAIL)
|
||||||
|
* @param {String or Buffer} errorMessage - Detailed error message.
|
||||||
|
* @param {String or Buffer} dbVersion - The version of the item in the
|
||||||
|
* database.
|
||||||
|
* @returns {Kinetic} this - message structure following the kinetic
|
||||||
|
* protocol
|
||||||
|
*/
|
||||||
|
getResponse(response, errorMessage, dbVersion) {
|
||||||
|
return this.setCommand({
|
||||||
|
"header": {
|
||||||
|
"messageType": "GET_RESPONSE",
|
||||||
|
"ackSequence": this.getProtobuf().header.sequence,
|
||||||
|
},
|
||||||
|
"body": {
|
||||||
|
"keyValue": {
|
||||||
|
"key": this.getProtobuf().body.keyValue.key,
|
||||||
|
"dbVersion": dbVersion,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"status": {
|
||||||
|
"code": response,
|
||||||
|
"detailedMessage": errorMessage,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DELETE request following the kinetic protocol.
|
||||||
|
* @param {String or Buffer} key - key of the item to put.
|
||||||
|
* @param {number} incrementTCP - monotonically increasing number for each
|
||||||
|
* request in a TCP connection
|
||||||
|
* @param {number} clusterVersion - The version number of this cluster
|
||||||
|
* definition
|
||||||
|
* @returns {Kinetic} this - message structure following the kinetic
|
||||||
|
* protocol
|
||||||
|
*/
|
||||||
|
delete(key, incrementTCP, clusterVersion) {
|
||||||
|
const identity = (new Date).getTime();
|
||||||
|
return this.setCommand({
|
||||||
|
"header": {
|
||||||
|
"messageType": "DELETE",
|
||||||
|
"connectionID": identity,
|
||||||
|
"sequence": incrementTCP,
|
||||||
|
"clusterVersion": clusterVersion,
|
||||||
|
},
|
||||||
|
"body": {
|
||||||
|
"keyValue": {
|
||||||
|
"key": key,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Response for the DELETE request following the kinetic protocol.
|
||||||
|
* @param {String or number} response - response code (SUCCESS, FAIL)
|
||||||
|
* @param {String or Buffer} errorMessage - Detailed error message.
|
||||||
|
* @returns {Kinetic} this - message structure following the kinetic
|
||||||
|
* protocol
|
||||||
|
*/
|
||||||
|
deleteResponse(response, errorMessage) {
|
||||||
|
return this.setCommand({
|
||||||
|
"header": {
|
||||||
|
"messageType": "DELETE_RESPONSE",
|
||||||
|
"ackSequence": this.getProtobuf().header.sequence,
|
||||||
|
},
|
||||||
|
"body": {
|
||||||
|
"keyValue": { },
|
||||||
|
},
|
||||||
|
"status": {
|
||||||
|
"code": response,
|
||||||
|
"detailedMessage": errorMessage,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sends data following Kinetic protocol.
|
||||||
|
* @param {Socket} sock - Socket to send data through.
|
||||||
|
*/
|
||||||
|
send(sock) {
|
||||||
|
const buf = new Buffer(9);
|
||||||
|
|
||||||
|
buf.writeInt8(this.getVersion(), 0);
|
||||||
|
|
||||||
|
// BE stands for Big Endian
|
||||||
|
buf.writeInt32BE(this.getProtobufSize(), 1);
|
||||||
|
buf.writeInt32BE(this.getChunkSize(), 5);
|
||||||
|
|
||||||
|
sock.write(Buffer.concat(
|
||||||
|
[buf, this.getProtobuf().toBuffer(), this.getChunk()]
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates the Kinetic Protocol Data Structure from a buffer.
|
||||||
|
* @param {Buffer} data - The data received by the socket.
|
||||||
|
*/
|
||||||
|
parse(data) {
|
||||||
|
const version = data.readInt8(0);
|
||||||
|
const pbMsgLen = data.readInt32BE(1);
|
||||||
|
const chunkLen = data.readInt32BE(5);
|
||||||
|
|
||||||
|
if (version !== this.getVersion()) {
|
||||||
|
return this.errors.VERSION_FAILURE;
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
this.setProtobuf(
|
||||||
|
this.getCommand().decode(data.slice(9, pbMsgLen + 9))
|
||||||
|
);
|
||||||
|
this.setChunk(data.slice(pbMsgLen + 9, chunkLen + pbMsgLen + 9));
|
||||||
|
} catch (e) {
|
||||||
|
return e;
|
||||||
|
}
|
||||||
|
if (this.getChunkSize() !== chunkLen) {
|
||||||
|
return this.errors.DATA_ERROR;
|
||||||
|
}
|
||||||
|
return this.errors.SUCCESS;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default Kinetic;
|
||||||
|
</code></pre>
</div>
</body>
</html>
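`send()` and `parse()` above define the PDU framing: one version byte (0x46), a 4-byte big-endian protobuf length, a 4-byte big-endian chunk length, then the two payloads. A standalone sketch of just that 9-byte header handling; the payloads are treated as opaque buffers, so nothing here depends on the protobuf schema:

```ts
// Sketch of the Kinetic PDU framing used by send()/parse():
// [ version:int8 | protobufLength:int32BE | chunkLength:int32BE | protobuf | chunk ]
const VERSION = 0x46;

function framePDU(protobuf: Buffer, chunk: Buffer): Buffer {
    const header = Buffer.alloc(9);
    header.writeInt8(VERSION, 0);
    header.writeInt32BE(protobuf.length, 1);
    header.writeInt32BE(chunk.length, 5);
    return Buffer.concat([header, protobuf, chunk]);
}

function unframePDU(data: Buffer): { protobuf: Buffer; chunk: Buffer } {
    if (data.readInt8(0) !== VERSION) {
        throw new Error('VERSION_FAILURE');
    }
    const pbLen = data.readInt32BE(1);
    const chunkLen = data.readInt32BE(5);
    return {
        protobuf: data.slice(9, 9 + pbLen),
        chunk: data.slice(9 + pbLen, 9 + pbLen + chunkLen),
    };
}

// Round-trip check with dummy payloads:
const pdu = framePDU(Buffer.from('pb'), Buffer.from('chunk'));
const { protobuf, chunk } = unframePDU(pdu);
```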
@ -1,28 +0,0 @@
{
  "groups": {
    "default": {
      "packages": [
        "lib/executables/pensieveCreds/package.json",
        "package.json"
      ]
    }
  },
  "branchPrefix": "improvement/greenkeeper.io/",
  "commitMessages": {
    "initialBadge": "docs(readme): add Greenkeeper badge",
    "initialDependencies": "chore(package): update dependencies",
    "initialBranches": "chore(bert-e): whitelist greenkeeper branches",
    "dependencyUpdate": "fix(package): update ${dependency} to version ${version}",
    "devDependencyUpdate": "chore(package): update ${dependency} to version ${version}",
    "dependencyPin": "fix: pin ${dependency} to ${oldVersionResolved}",
    "devDependencyPin": "chore: pin ${dependency} to ${oldVersionResolved}",
    "closes": "\n\nCloses #${number}"
  },
  "ignore": [
    "ajv",
    "eslint",
    "eslint-plugin-react",
    "eslint-config-airbnb",
    "eslint-config-scality"
  ]
}
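The `commitMessages` entries are templates: Greenkeeper substitutes `${dependency}`, `${version}`, and similar placeholders when it opens a pull request. A minimal sketch of that substitution; the renderer below is illustrative, not Greenkeeper's actual code, and the update data is made up:

```ts
// Hypothetical illustration of resolving the commit-message templates above.
const template = 'fix(package): update ${dependency} to version ${version}';

function renderCommitMessage(tpl: string, vars: Record<string, string>): string {
    // Replace each ${name} placeholder with its value from `vars`.
    return tpl.replace(/\$\{(\w+)\}/g, (_, name) => vars[name] ?? '');
}

// => "fix(package): update protobufjs to version 6.11.4"
console.log(renderCommitMessage(template, { dependency: 'protobufjs', version: '6.11.4' }));
```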
@ -0,0 +1,125 @@
<!DOCTYPE html>
<html>
<head>
  <meta charset="utf-8">
  <base data-ice="baseUrl">
  <title data-ice="title">Index | API Document</title>
</head>
<body class="layout-container" data-ice="rootContainer">
<div class="content" data-ice="content"><h1>References</h1>
<h2 id="class">Class Summary</h2>
<table class="summary">
  <thead><tr><td colspan="3">Static Public Class Summary</td></tr></thead>
  <tbody>
    <tr>
      <td>public</td>
      <td><a href="class/kinetic/Kinetic.js~Kinetic.html">Kinetic</a>
        <p>Represents the Kinetic Protocol Data Structure.</p></td>
      <td></td>
    </tr>
  </tbody>
</table>
<h2 id="variable">Variable Summary</h2>
<table class="summary">
  <thead><tr><td colspan="3">Static Public Variable Summary</td></tr></thead>
  <tbody>
    <tr>
      <td>public</td>
      <td><a href="variable/index.html#static-variable-kinetic">kinetic</a>: <a href="class/kinetic/Kinetic.js~Kinetic.html">Kinetic</a></td>
      <td></td>
    </tr>
  </tbody>
</table>
</div>
</body>
</html>
@ -0,0 +1,17 @@
<svg xmlns="http://www.w3.org/2000/svg" width="102" height="20">
  <script/>
  <linearGradient id="a" x2="0" y2="100%">
    <stop offset="0" stop-color="#bbb" stop-opacity=".1"/>
    <stop offset="1" stop-opacity=".1"/>
  </linearGradient>
  <rect rx="3" width="102" height="20" fill="#555"/>
  <rect rx="3" x="64" width="38" height="20" fill="@color@"/>
  <path fill="@color@" d="M64 0h4v20h-4z"/>
  <rect rx="3" width="102" height="20" fill="url(#a)"/>
  <g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="11">
    <text x="32" y="15" fill="#010101" fill-opacity=".3">document</text>
    <text x="32" y="14">document</text>
    <text x="82.5" y="15" fill="#010101" fill-opacity=".3">@ratio@</text>
    <text x="82.5" y="14">@ratio@</text>
  </g>
</svg>
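This badge is a template: `@color@` and `@ratio@` are placeholders filled in at documentation-build time. A hedged sketch of that substitution step; the function, file names, and color thresholds are all assumptions for illustration, not taken from the build:

```ts
import { readFileSync, writeFileSync } from 'fs';

// Hypothetical build step: fill the @color@/@ratio@ placeholders of the
// badge template with a measured documentation-coverage ratio.
function renderBadge(templatePath: string, ratio: number, outPath: string): void {
    const color = ratio >= 90 ? '#4c1' : ratio >= 50 ? '#dfb317' : '#e05d44';
    const svg = readFileSync(templatePath, 'utf8')
        .replace(/@color@/g, color)          // background of the right-hand cell
        .replace(/@ratio@/g, `${ratio}%`);   // e.g. "87%"
    writeFileSync(outPath, svg);
}

renderBadge('badge.svg.template', 87, 'badge.svg');
```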
@ -0,0 +1,63 @@
<!DOCTYPE html>
<html>
<head>
  <meta charset="utf-8">
  <base data-ice="baseUrl">
  <title data-ice="title">API Document</title>
</head>
<body class="layout-container" data-ice="rootContainer">
<div class="content" data-ice="content"><div data-ice="index" class="github-markdown"><h1 id="ironman-arsenal">IronMan-Arsenal</h1>
<p>Common utilities for the IronMan project components.</p>
<p>Within this repository, you will be able to find the shared libraries for the
multiple components making up the whole Project.</p>
<h2 id="guidelines">Guidelines</h2>
<p>Please read our coding and workflow guidelines at
<a href="https://github.com/scality/IronMan-Guidelines">scality/IronMan-Guidelines</a>.</p>
</div>
</div>
</body>
</html>
index.ts
@ -1,175 +0,0 @@
import * as evaluators from './lib/policyEvaluator/evaluator';
import evaluatePrincipal from './lib/policyEvaluator/principal';
import RequestContext, {
    actionNeedQuotaCheck,
    actionNeedQuotaCheckCopy,
    actionWithDataDeletion } from './lib/policyEvaluator/RequestContext';
import * as requestUtils from './lib/policyEvaluator/requestUtils';
import * as actionMaps from './lib/policyEvaluator/utils/actionMaps';
import { validateUserPolicy } from './lib/policy/policyValidator';
import * as locationConstraints from './lib/patches/locationConstraints';
import * as userMetadata from './lib/s3middleware/userMetadata';
import convertToXml from './lib/s3middleware/convertToXml';
import escapeForXml from './lib/s3middleware/escapeForXml';
import * as objectLegalHold from './lib/s3middleware/objectLegalHold';
import * as tagging from './lib/s3middleware/tagging';
import {
    checkDateModifiedHeaders,
    validateConditionalHeaders,
} from './lib/s3middleware/validateConditionalHeaders';
import MD5Sum from './lib/s3middleware/MD5Sum';
import NullStream from './lib/s3middleware/nullStream';
import * as objectUtils from './lib/s3middleware/objectUtils';
import * as mpuUtils from './lib/s3middleware/azureHelpers/mpuUtils';
import ResultsCollector from './lib/s3middleware/azureHelpers/ResultsCollector';
import SubStreamInterface from './lib/s3middleware/azureHelpers/SubStreamInterface';
import { prepareStream } from './lib/s3middleware/prepareStream';
import * as processMpuParts from './lib/s3middleware/processMpuParts';
import * as retention from './lib/s3middleware/objectRetention';
import * as objectRestore from './lib/s3middleware/objectRestore';
import * as lifecycleHelpers from './lib/s3middleware/lifecycleHelpers';

export { default as errors } from './lib/errors';
export { default as Clustering } from './lib/Clustering';
export * as ClusterRPC from './lib/clustering/ClusterRPC';
export * as ipCheck from './lib/ipCheck';
export * as auth from './lib/auth/auth';
export * as constants from './lib/constants';
export * as https from './lib/https';
export * as metrics from './lib/metrics';
export * as network from './lib/network';
export * as s3routes from './lib/s3routes';
export * as versioning from './lib/versioning';
export * as stream from './lib/stream';
export * as jsutil from './lib/jsutil';
export { default as stringHash } from './lib/stringHash';
export * as db from './lib/db';
export * as errorUtils from './lib/errorUtils';
export { default as shuffle } from './lib/shuffle';
export * as models from './lib/models';

export const algorithms = {
    list: require('./lib/algos/list/exportAlgos'),
    listTools: {
        DelimiterTools: require('./lib/algos/list/tools'),
        Skip: require('./lib/algos/list/skip'),
    },
    cache: {
        GapSet: require('./lib/algos/cache/GapSet'),
        GapCache: require('./lib/algos/cache/GapCache'),
        LRUCache: require('./lib/algos/cache/LRUCache'),
    },
    stream: {
        MergeStream: require('./lib/algos/stream/MergeStream'),
    },
    SortedSet: require('./lib/algos/set/SortedSet'),
    Heap: require('./lib/algos/heap/Heap'),
};

export const policies = {
    evaluators,
    validateUserPolicy,
    evaluatePrincipal,
    RequestContext,
    requestUtils,
    actionMaps,
    actionNeedQuotaCheck,
    actionWithDataDeletion,
    actionNeedQuotaCheckCopy,
};

export const testing = {
    matrix: require('./lib/testing/matrix.js'),
};

export const s3middleware = {
    userMetadata,
    convertToXml,
    escapeForXml,
    objectLegalHold,
    tagging,
    checkDateModifiedHeaders,
    validateConditionalHeaders,
    MD5Sum,
    NullStream,
    objectUtils,
    azureHelper: {
        mpuUtils,
        ResultsCollector,
        SubStreamInterface,
    },
    prepareStream,
    processMpuParts,
    retention,
    objectRestore,
    lifecycleHelpers,
};

export const storage = {
    metadata: {
        MetadataWrapper: require('./lib/storage/metadata/MetadataWrapper'),
        bucketclient: {
            BucketClientInterface:
                require('./lib/storage/metadata/bucketclient/' +
                    'BucketClientInterface'),
            LogConsumer:
                require('./lib/storage/metadata/bucketclient/LogConsumer'),
        },
        file: {
            BucketFileInterface:
                require('./lib/storage/metadata/file/BucketFileInterface'),
            MetadataFileServer:
                require('./lib/storage/metadata/file/MetadataFileServer'),
            MetadataFileClient:
                require('./lib/storage/metadata/file/MetadataFileClient'),
        },
        inMemory: {
            metastore:
                require('./lib/storage/metadata/in_memory/metastore'),
            metadata: require('./lib/storage/metadata/in_memory/metadata'),
            bucketUtilities:
                require('./lib/storage/metadata/in_memory/bucket_utilities'),
        },
        mongoclient: {
            MongoClientInterface:
                require('./lib/storage/metadata/mongoclient/' +
                    'MongoClientInterface'),
            LogConsumer:
                require('./lib/storage/metadata/mongoclient/LogConsumer'),
        },
        proxy: {
            Server: require('./lib/storage/metadata/proxy/Server'),
        },
    },
    data: {
        DataWrapper: require('./lib/storage/data/DataWrapper'),
        MultipleBackendGateway:
            require('./lib/storage/data/MultipleBackendGateway'),
        parseLC: require('./lib/storage/data/LocationConstraintParser'),
        file: {
            DataFileStore:
                require('./lib/storage/data/file/DataFileStore'),
            DataFileInterface:
                require('./lib/storage/data/file/DataFileInterface'),
        },
        external: {
            AwsClient: require('./lib/storage/data/external/AwsClient'),
            AzureClient: require('./lib/storage/data/external/AzureClient'),
            GcpClient: require('./lib/storage/data/external/GcpClient'),
            GCP: require('./lib/storage/data/external/GCP/GcpService'),
            GcpUtils: require('./lib/storage/data/external/GCP/GcpUtils'),
            GcpSigner: require('./lib/storage/data/external/GCP/GcpSigner'),
            PfsClient: require('./lib/storage/data/external/PfsClient'),
            backendUtils: require('./lib/storage/data/external/utils'),
        },
        inMemory: {
            datastore: require('./lib/storage/data/in_memory/datastore'),
        },
    },
    utils: require('./lib/storage/utils'),
};

export const pensieve = {
    credentialUtils: require('./lib/executables/pensieveCreds/utils'),
};

export const patches = {
    locationConstraints,
};
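These exports form the package's public surface, grouped into namespaces. A hedged sketch of how a consumer might use it; the package name `arsenal` and the call signatures are assumptions, not taken from this diff:

```ts
// Illustrative consumer of the exports above (names and signatures assumed).
import { s3middleware, shuffle, errors } from 'arsenal';

// XML-escape an object key before embedding it in a listing response.
const safe = s3middleware.escapeForXml('a&b<c>');
// A root-level utility re-exported from lib/shuffle.
const order = shuffle([1, 2, 3, 4]);
console.log(safe, order, Object.keys(errors).length);
```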
@ -1,280 +0,0 @@
import cluster, { Worker } from 'cluster';
import * as werelogs from 'werelogs';

export default class Clustering {
    _size: number;
    _shutdownTimeout: number;
    _logger: werelogs.Logger;
    _shutdown: boolean;
    _workers: (Worker | undefined)[];
    _workersTimeout: (NodeJS.Timeout | undefined)[];
    _workersStatus: (number | string | undefined)[];
    _status: number;
    _exitCb?: (clustering: Clustering, exitSignal?: string) => void;
    _index?: number;

    /**
     * Constructor
     *
     * @param size Cluster size
     * @param logger Logger object
     * @param [shutdownTimeout=5000] Change default shutdown timeout
     * for releasing resources
     * @return itself
     */
    constructor(size: number, logger: werelogs.Logger, shutdownTimeout?: number) {
        this._size = size;
        if (size < 1) {
            throw new Error('Cluster size must be greater than or equal to 1');
        }
        this._shutdownTimeout = shutdownTimeout || 5000;
        this._logger = logger;
        this._shutdown = false;
        this._workers = new Array(size).fill(undefined);
        this._workersTimeout = new Array(size).fill(undefined);
        this._workersStatus = new Array(size).fill(undefined);
        this._status = 0;
        this._exitCb = undefined; // Exit callback
        this._index = undefined;
    }

    /**
     * Method called after a stop() call
     *
     * @private
     */
    _afterStop() {
        // Assuming all workers shut down gracefully
        this._status = 0;
        const size = this._size;
        for (let i = 0; i < size; ++i) {
            // If the process returned an error code or was killed by a
            // signal, set the status
            const status = this._workersStatus[i];
            if (typeof status === 'number') {
                this._status = status;
                break;
            } else if (typeof status === 'string') {
                this._status = 1;
                break;
            }
        }
        if (this._exitCb) {
            return this._exitCb(this);
        }
        return process.exit(this.getStatus());
    }

    /**
     * Method called when a worker exited
     *
     * @param worker - Current worker
     * @param i - Worker index
     * @param code - Exit code
     * @param signal - Exit signal
     */
    _workerExited(
        worker: Worker,
        i: number,
        code: number,
        signal: string,
    ) {
        // If the worker:
        // - was killed by a signal
        // - returned an error code
        // - or just stopped
        if (signal) {
            this._logger.info('Worker killed by signal', {
                signal,
                id: i,
                childPid: worker.process.pid,
            });
            this._workersStatus[i] = signal;
        } else if (code !== 0) {
            this._logger.error('Worker exit with code', {
                code,
                id: i,
                childPid: worker.process.pid,
            });
            this._workersStatus[i] = code;
        } else {
            this._logger.info('Worker shutdown gracefully', {
                id: i,
                childPid: worker.process.pid,
            });
            this._workersStatus[i] = undefined;
        }
        this._workers[i] = undefined;
        const timeout = this._workersTimeout[i];
        if (timeout) {
            clearTimeout(timeout);
            this._workersTimeout[i] = undefined;
        }
        // If we don't trigger the stop method, the watchdog
        // will auto-restart the worker
        if (this._shutdown === false) {
            return process.nextTick(() => this.startWorker(i));
        }
        // Check if a worker is still running
        if (!this._workers.every(cur => cur === undefined)) {
            return undefined;
        }
        return this._afterStop();
    }

    /**
     * Method to start a worker
     *
     * @param i Index of the starting worker
     */
    startWorker(i: number) {
        if (!cluster.isPrimary) {
            return;
        }
        // Fork a new worker
        this._workers[i] = cluster.fork();
        // Listen for messages from the worker
        this._workers[i]!.on('message', msg => {
            // If the worker is ready, send it its id
            if (msg === 'ready') {
                this._workers[i]!.send({ msg: 'setup', id: i });
            }
        });
        this._workers[i]!.on('exit', (code, signal) =>
            this._workerExited(this._workers[i]!, i, code, signal));
        // Triggered when the worker has started
        this._workers[i]!.on('online', () => {
            this._logger.info('Worker started', {
                id: i,
                childPid: this._workers[i]!.process.pid,
            });
        });
    }

    /**
     * Method to put a handler on cluster exit
     *
     * @param cb - Callback(Clustering, [exitSignal])
     * @return Itself
     */
    onExit(cb: (clustering: Clustering, exitSignal?: string) => void) {
        this._exitCb = cb;
        return this;
    }

    /**
     * Method to start the cluster (if master) or to run the callback
     * (worker)
     *
     * @param cb - Callback to run the worker
     * @return itself
     */
    start(cb: (clustering: Clustering) => void) {
        process.on('SIGINT', () => this.stop('SIGINT'));
        process.on('SIGHUP', () => this.stop('SIGHUP'));
        process.on('SIGQUIT', () => this.stop('SIGQUIT'));
        process.on('SIGTERM', () => this.stop('SIGTERM'));
        process.on('SIGPIPE', () => {});
        process.on('exit', (code?: number, signal?: string) => {
            if (this._exitCb) {
                this._status = code || 0;
                return this._exitCb(this, signal);
            }
            return process.exit(code || 0);
        });
        process.on('uncaughtException', (err: Error) => {
            this._logger.fatal('caught error', {
                error: err.message,
                stack: err.stack?.split('\n')?.map(str => str.trim()),
            });
            process.exit(1);
        });
        if (!cluster.isPrimary) {
            // Wait for the message from the master to
            // know the id of this worker
            process.on('message', (msg: any) => {
                if (msg.msg === 'setup') {
                    this._index = msg.id;
                    cb(this);
                }
            });
            // Send a message to the master, to let it know
            // the worker has started
            process.send?.('ready');
        } else {
            for (let i = 0; i < this._size; ++i) {
                this.startWorker(i);
            }
        }
        return this;
    }

    /**
     * Method to get workers
     *
     * @return Workers
     */
    getWorkers() {
        return this._workers;
    }

    /**
     * Method to get the status of the cluster
     *
     * @return Status code
     */
    getStatus() {
        return this._status;
    }

    /**
     * Method to return whether this is the master process
     *
     * @return - True if master, false otherwise
     */
    isMaster() {
        return this._index === undefined;
    }

    /**
     * Method to get the index of the worker
     *
     * @return Worker index, undefined if it's the master
     */
    getIndex() {
        return this._index;
    }

    /**
     * Method to stop the cluster
     *
     * @param signal - Set internally when processes are killed by a signal
     */
    stop(signal?: string) {
        if (!cluster.isPrimary) {
            if (this._exitCb) {
                return this._exitCb(this, signal);
            }
            return process.exit(0);
        }
        this._shutdown = true;
        return this._workers.forEach((worker, i) => {
            if (!worker) {
                return undefined;
            }
            this._workersTimeout[i] = setTimeout(() => {
                // Kill the worker if SIGTERM was ignored or takes too long
                if (worker.process.pid) {
                    process.kill(worker.process.pid, 'SIGKILL');
                }
            }, this._shutdownTimeout);
            // Send SIGTERM to the process, allowing it to release resources
            // and save some state
            if (worker.process.pid) {
                return process.kill(worker.process.pid, 'SIGTERM');
            } else {
                return true;
            }
        });
    }
}
|
|
|
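
// A minimal usage sketch (hypothetical; it assumes the Clustering constructor
// takes a worker count and a logger, as suggested by the '_size' and '_logger'
// fields used above, and 'runServer' is a made-up worker entry point):
//
//     const clustering = new Clustering(4, logger);
//     clustering
//         .onExit((c, signal) => logger.info('cluster exiting', { signal }))
//         .start(c => {
//             // worker code: c.getIndex() is this worker's id
//             runServer(c.getIndex());
//         });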
@ -1,363 +0,0 @@
import { OrderedSet } from '@js-sdsl/ordered-set';

import {
    default as GapSet,
    GapSetEntry,
} from './GapSet';

// the API is similar but is not strictly a superset of GapSetInterface
// so we don't extend from it
export interface GapCacheInterface {
    exposureDelayMs: number;
    maxGapWeight: number;
    size: number;

    setGap: (firstKey: string, lastKey: string, weight: number) => void;
    removeOverlappingGaps: (overlappingKeys: string[]) => number;
    lookupGap: (minKey: string, maxKey?: string) => Promise<GapSetEntry | null>;
    [Symbol.iterator]: () => Iterator<GapSetEntry>;
    toArray: () => GapSetEntry[];
};

class GapCacheUpdateSet {
    newGaps: GapSet;
    updatedKeys: OrderedSet<string>;

    constructor(maxGapWeight: number) {
        this.newGaps = new GapSet(maxGapWeight);
        this.updatedKeys = new OrderedSet();
    }

    addUpdateBatch(updatedKeys: OrderedSet<string>): void {
        this.updatedKeys.union(updatedKeys);
    }
};

/**
 * Cache of listing "gaps", i.e. ranges of keys that can be skipped
 * over during listing (because they only contain delete markers as
 * latest versions).
 *
 * Typically, a single GapCache instance would be attached to a raft session.
 *
 * The API usage is as follows:
 *
 * - Initialize a GapCache instance by calling start() (this starts an internal timer)
 *
 * - Insert a gap or update an existing one via setGap()
 *
 * - Look up existing gaps via lookupGap()
 *
 * - Invalidate gaps that overlap a specific set of keys via removeOverlappingGaps()
 *
 * - Shut down a GapCache instance by calling stop() (this stops the internal timer)
 *
 * Gaps inserted via setGap() are not exposed immediately to lookupGap(), but only:
 *
 * - after a delay that is always larger than 'exposureDelayMs' and usually shorter
 *   than twice this value (but might be slightly longer in rare cases)
 *
 * - and only if they haven't been invalidated by a recent call to removeOverlappingGaps()
 *
 * This ensures atomicity between gap creation and invalidation from updates under
 * the condition that a gap is created from first key to last key within the time defined
 * by 'exposureDelayMs'.
 *
 * The implementation is based on two extra temporary "update sets" on top of the main
 * exposed gap set, one called "staging" and the other "frozen", each containing a
 * temporary updated gap set and a list of updated keys to invalidate gaps with (coming
 * from calls to removeOverlappingGaps()). Every 'exposureDelayMs' milliseconds, the frozen
 * gaps are invalidated by all key updates coming from either the "staging" or the "frozen"
 * update set, then merged into the exposed gaps set, after which the staging updates become
 * the frozen updates and won't receive any new gap until the next cycle.
 */
export default class GapCache implements GapCacheInterface {
    _exposureDelayMs: number;
    maxGaps: number;

    _stagingUpdates: GapCacheUpdateSet;
    _frozenUpdates: GapCacheUpdateSet;
    _exposedGaps: GapSet;
    _exposeFrozenInterval: NodeJS.Timeout | null;

    /**
     * @constructor
     *
     * @param {number} exposureDelayMs - minimum delay between
     *   insertion of a gap via setGap() and its exposure via
     *   lookupGap()
     * @param {number} maxGaps - maximum number of cached gaps, after
     *   which no new gap can be added by setGap(). (Note: a future
     *   improvement could replace this by an eviction strategy)
     * @param {number} maxGapWeight - maximum "weight" of individual
     *   cached gaps, which is also the granularity for
     *   invalidation. Individual gaps can be chained together,
     *   which lookupGap() transparently consolidates in the response
     *   into a single large gap.
     */
    constructor(exposureDelayMs: number, maxGaps: number, maxGapWeight: number) {
        this._exposureDelayMs = exposureDelayMs;
        this.maxGaps = maxGaps;

        this._stagingUpdates = new GapCacheUpdateSet(maxGapWeight);
        this._frozenUpdates = new GapCacheUpdateSet(maxGapWeight);
        this._exposedGaps = new GapSet(maxGapWeight);
        this._exposeFrozenInterval = null;
    }

    /**
     * Create a GapCache from an array of exposed gap entries (used in tests)
     *
     * @return {GapCache} - a new GapCache instance
     */
    static createFromArray(
        gaps: GapSetEntry[],
        exposureDelayMs: number,
        maxGaps: number,
        maxGapWeight: number
    ): GapCache {
        const gapCache = new GapCache(exposureDelayMs, maxGaps, maxGapWeight);
        gapCache._exposedGaps = GapSet.createFromArray(gaps, maxGapWeight);
        return gapCache;
    }

    /**
     * Internal helper to remove gaps in the staging and frozen sets
     * overlapping with previously updated keys, right before the
     * frozen gaps get exposed.
     *
     * @return {undefined}
     */
    _removeOverlappingGapsBeforeExpose(): void {
        for (const { updatedKeys } of [this._stagingUpdates, this._frozenUpdates]) {
            if (updatedKeys.size() === 0) {
                continue;
            }
            for (const { newGaps } of [this._stagingUpdates, this._frozenUpdates]) {
                if (newGaps.size === 0) {
                    continue;
                }
                newGaps.removeOverlappingGaps(updatedKeys);
            }
        }
    }

    /**
     * This function is the core mechanism that updates the exposed gaps in the
     * cache. It is called on a regular interval defined by 'exposureDelayMs'.
     *
     * It does the following in order:
     *
     * - remove gaps from the frozen set that overlap with any key present in a
     *   batch passed to removeOverlappingGaps() since the last two triggers of
     *   _exposeFrozen()
     *
     * - merge the remaining gaps from the frozen set to the exposed set, which
     *   makes them visible from calls to lookupGap()
     *
     * - rotate by freezing the currently staging updates and initiating a new
     *   staging updates set
     *
     * @return {undefined}
     */
    _exposeFrozen(): void {
        this._removeOverlappingGapsBeforeExpose();
        for (const gap of this._frozenUpdates.newGaps) {
            // Use a trivial strategy to keep the cache size within
            // limits: refuse to add new gaps when the size is above
            // the 'maxGaps' threshold. We solely rely on
            // removeOverlappingGaps() to make space for new gaps.
            if (this._exposedGaps.size < this.maxGaps) {
                this._exposedGaps.setGap(gap.firstKey, gap.lastKey, gap.weight);
            }
        }
        this._frozenUpdates = this._stagingUpdates;
        this._stagingUpdates = new GapCacheUpdateSet(this.maxGapWeight);
    }

    /**
     * Start the internal GapCache timer
     *
     * @return {undefined}
     */
    start(): void {
        if (this._exposeFrozenInterval) {
            return;
        }
        this._exposeFrozenInterval = setInterval(
            () => this._exposeFrozen(),
            this._exposureDelayMs);
    }

    /**
     * Stop the internal GapCache timer
     *
     * @return {undefined}
     */
    stop(): void {
        if (this._exposeFrozenInterval) {
            clearInterval(this._exposeFrozenInterval);
            this._exposeFrozenInterval = null;
        }
    }

    /**
     * Record a gap between two keys, associated with a weight to
     * limit individual gaps' spanning ranges in the cache, for a more
     * granular invalidation.
     *
     * The function handles splitting and merging existing gaps to
     * maintain an optimal weight of cache entries.
     *
     * NOTE 1: the caller must ensure that the full length of the gap
     * between 'firstKey' and 'lastKey' has been built from a listing
     * snapshot that is more recent than 'exposureDelayMs' milliseconds,
     * in order to guarantee that the exposed gap will be fully
     * covered (and potentially invalidated) by recent calls to
     * removeOverlappingGaps().
     *
     * NOTE 2: a usual pattern when building a large gap from multiple
     * calls to setGap() is to start the next gap from 'lastKey',
     * which will be passed as 'firstKey' in the next call, so that
     * gaps can be chained together and consolidated by lookupGap().
     *
     * @param {string} firstKey - first key of the gap
     * @param {string} lastKey - last key of the gap, must be greater
     *   than or equal to 'firstKey'
     * @param {number} weight - total weight between 'firstKey' and 'lastKey'
     * @return {undefined}
     */
    setGap(firstKey: string, lastKey: string, weight: number): void {
        this._stagingUpdates.newGaps.setGap(firstKey, lastKey, weight);
    }

    /**
     * Remove gaps that overlap with a given set of keys. Used to
     * invalidate gaps when keys are inserted or deleted.
     *
     * @param {OrderedSet<string> | string[]} overlappingKeys - remove gaps that
     *   overlap with any of this set of keys
     * @return {number} - how many gaps were removed from the exposed
     *   gaps only (overlapping gaps not yet exposed are also invalidated
     *   but are not accounted for in the returned value)
     */
    removeOverlappingGaps(overlappingKeys: OrderedSet<string> | string[]): number {
        let overlappingKeysSet;
        if (Array.isArray(overlappingKeys)) {
            overlappingKeysSet = new OrderedSet(overlappingKeys);
        } else {
            overlappingKeysSet = overlappingKeys;
        }
        this._stagingUpdates.addUpdateBatch(overlappingKeysSet);
        return this._exposedGaps.removeOverlappingGaps(overlappingKeysSet);
    }

    /**
     * Lookup the next exposed gap that overlaps with [minKey, maxKey]. Internally
     * chained gaps are coalesced in the response into a single contiguous large gap.
     *
     * @param {string} minKey - minimum key overlapping with the returned gap
     * @param {string} [maxKey] - maximum key overlapping with the returned gap
     * @return {Promise<GapSetEntry | null>} - result of the lookup if a gap
     *   was found, null otherwise, as a Promise
     */
    lookupGap(minKey: string, maxKey?: string): Promise<GapSetEntry | null> {
        return this._exposedGaps.lookupGap(minKey, maxKey);
    }

    /**
     * Get the maximum weight setting for individual gaps.
     *
     * @return {number} - maximum weight of individual gaps
     */
    get maxGapWeight(): number {
        return this._exposedGaps.maxWeight;
    }

    /**
     * Set the maximum weight setting for individual gaps.
     *
     * @param {number} gapWeight - maximum weight of individual gaps
     */
    set maxGapWeight(gapWeight: number) {
        this._exposedGaps.maxWeight = gapWeight;
        // also update the transient gap sets
        this._stagingUpdates.newGaps.maxWeight = gapWeight;
        this._frozenUpdates.newGaps.maxWeight = gapWeight;
    }

    /**
     * Get the exposure delay in milliseconds, which is the minimum
     * time after which newly cached gaps will be exposed by
     * lookupGap().
     *
     * @return {number} - exposure delay in milliseconds
     */
    get exposureDelayMs(): number {
        return this._exposureDelayMs;
    }

    /**
     * Set the exposure delay in milliseconds, which is the minimum
     * time after which newly cached gaps will be exposed by
     * lookupGap(). Setting this attribute automatically updates the
     * internal state to honor the new value.
     *
     * @param {number} exposureDelayMs - exposure delay in milliseconds
     */
    set exposureDelayMs(exposureDelayMs: number) {
        if (exposureDelayMs !== this._exposureDelayMs) {
            this._exposureDelayMs = exposureDelayMs;
            if (this._exposeFrozenInterval) {
                // invalidate all pending gap updates, as the new interval may not be
                // safe for them
                this._stagingUpdates = new GapCacheUpdateSet(this.maxGapWeight);
                this._frozenUpdates = new GapCacheUpdateSet(this.maxGapWeight);

                // reinitialize the _exposeFrozenInterval timer with the updated delay
                this.stop();
                this.start();
            }
        }
    }

    /**
     * Get the number of exposed gaps
     *
     * @return {number} number of exposed gaps
     */
    get size(): number {
        return this._exposedGaps.size;
    }

    /**
     * Iterate over exposed gaps
     *
     * @return {Iterator<GapSetEntry>} an iterator over exposed gaps
     */
    [Symbol.iterator](): Iterator<GapSetEntry> {
        return this._exposedGaps[Symbol.iterator]();
    }

    /**
     * Get an array of all exposed gaps
     *
     * @return {GapSetEntry[]} array of exposed gaps
     */
    toArray(): GapSetEntry[] {
        return this._exposedGaps.toArray();
    }

    /**
     * Clear all exposed and staging gaps from the cache.
     *
     * Note: this retains invalidating updates from removeOverlappingGaps()
     * for correctness of gaps inserted afterwards.
     *
     * @return {undefined}
     */
    clear(): void {
        this._stagingUpdates.newGaps = new GapSet(this.maxGapWeight);
        this._frozenUpdates.newGaps = new GapSet(this.maxGapWeight);
        this._exposedGaps = new GapSet(this.maxGapWeight);
    }
}
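
// A minimal sketch of the GapCache lifecycle documented above (the delay,
// sizes and keys are made up for illustration):
//
//     const cache = new GapCache(100, 1000, 10);
//     cache.start();
//     cache.setGap('bar', 'baz', 10);
//     // after at least 'exposureDelayMs' (100-200ms here), the gap is exposed:
//     setTimeout(async () => {
//         const gap = await cache.lookupGap('bar');
//         // gap -> { firstKey: 'bar', lastKey: 'baz', weight: 10 }
//         cache.removeOverlappingGaps(['bat']); // 'bat' invalidates the gap
//         cache.stop();
//     }, 250);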
@ -1,366 +0,0 @@
import assert from 'assert';
import { OrderedSet } from '@js-sdsl/ordered-set';

import errors from '../../errors';

export type GapSetEntry = {
    firstKey: string,
    lastKey: string,
    weight: number,
};

export interface GapSetInterface {
    maxWeight: number;
    size: number;

    setGap: (firstKey: string, lastKey: string, weight: number) => GapSetEntry;
    removeOverlappingGaps: (overlappingKeys: string[]) => number;
    lookupGap: (minKey: string, maxKey?: string) => Promise<GapSetEntry | null>;
    [Symbol.iterator]: () => Iterator<GapSetEntry>;
    toArray: () => GapSetEntry[];
};

/**
 * Specialized data structure to support caching of listing "gaps",
 * i.e. ranges of keys that can be skipped over during listing
 * (because they only contain delete markers as latest versions)
 */
export default class GapSet implements GapSetInterface, Iterable<GapSetEntry> {
    _gaps: OrderedSet<GapSetEntry>;
    _maxWeight: number;

    /**
     * @constructor
     *
     * @param {number} maxWeight - weight threshold for each cached
     *   gap (unitless). Triggers splitting gaps when reached
     */
    constructor(maxWeight: number) {
        this._gaps = new OrderedSet(
            [],
            (left: GapSetEntry, right: GapSetEntry) => (
                left.firstKey < right.firstKey ? -1 :
                    left.firstKey > right.firstKey ? 1 : 0
            )
        );
        this._maxWeight = maxWeight;
    }

    /**
     * Create a GapSet from an array of gap entries (used in tests)
     */
    static createFromArray(gaps: GapSetEntry[], maxWeight: number): GapSet {
        const gapSet = new GapSet(maxWeight);
        for (const gap of gaps) {
            gapSet._gaps.insert(gap);
        }
        return gapSet;
    }

    /**
     * Record a gap between two keys, associated with a weight to limit
     * individual gap sizes in the cache.
     *
     * The function handles splitting and merging existing gaps to
     * maintain an optimal weight of cache entries.
     *
     * @param {string} firstKey - first key of the gap
     * @param {string} lastKey - last key of the gap, must be greater
     *   than or equal to 'firstKey'
     * @param {number} weight - total weight between 'firstKey' and 'lastKey'
     * @return {GapSetEntry} - existing or new gap entry
     */
    setGap(firstKey: string, lastKey: string, weight: number): GapSetEntry {
        assert(lastKey >= firstKey);

        // Step 1/4: Find the closest left-overlapping gap, and if it exists,
        // either re-use it or chain it with a new gap depending on the weights
        // (otherwise just create a new gap).
        const curGapIt = this._gaps.reverseLowerBound(<GapSetEntry>{ firstKey });
        let curGap;
        if (curGapIt.isAccessible()) {
            curGap = curGapIt.pointer;
            if (curGap.lastKey >= lastKey) {
                // return the fully overlapping gap already cached
                return curGap;
            }
        }
        let remainingWeight = weight;
        if (!curGap // no previous gap
            || curGap.lastKey < firstKey // previous gap not overlapping
            || (curGap.lastKey === firstKey // previous gap overlapping by one key...
                && curGap.weight + weight > this._maxWeight) // ...but we can't extend it
        ) {
            // create a new gap indexed by 'firstKey'
            curGap = { firstKey, lastKey: firstKey, weight: 0 };
            this._gaps.insert(curGap);
        } else if (curGap.lastKey > firstKey && weight > this._maxWeight) {
            // the previous gap is either fully or partially contained in the new gap
            // and cannot be extended: subtract its weight from the total (heuristic
            // in case the previous gap doesn't start at 'firstKey', which is the
            // uncommon case)
            remainingWeight -= curGap.weight;

            // there may be an existing chained gap starting with the previous gap's
            // 'lastKey': use it if it exists
            const chainedGapIt = this._gaps.find(<GapSetEntry>{ firstKey: curGap.lastKey });
            if (chainedGapIt.isAccessible()) {
                curGap = chainedGapIt.pointer;
            } else {
                // no existing chained gap: chain a new gap to the previous gap
                curGap = {
                    firstKey: curGap.lastKey,
                    lastKey: curGap.lastKey,
                    weight: 0,
                };
                this._gaps.insert(curGap);
            }
        }
        // Step 2/4: Clean up existing gaps fully included in firstKey -> lastKey, and
        // aggregate their weights in curGap to define the minimum weight up to the
        // last merged gap.
        let nextGap;
        while (true) {
            const nextGapIt = this._gaps.upperBound(<GapSetEntry>{ firstKey: curGap.firstKey });
            nextGap = nextGapIt.isAccessible() && nextGapIt.pointer;
            // stop the cleanup when there is no more gap or if the next gap
            // is not fully included in curGap
            if (!nextGap || nextGap.lastKey > lastKey) {
                break;
            }
            this._gaps.eraseElementByIterator(nextGapIt);
            curGap.lastKey = nextGap.lastKey;
            curGap.weight += nextGap.weight;
        }

        // Step 3/4: Extend curGap to lastKey, adjusting the weight.
        // At this point, curGap's weight is the minimum weight of the finished
        // gap, save it for step 4.
        let minMergedWeight = curGap.weight;
        if (curGap.lastKey === firstKey && firstKey !== lastKey) {
            // extend the existing gap by the full amount 'firstKey -> lastKey'
            curGap.lastKey = lastKey;
            curGap.weight += remainingWeight;
        } else if (curGap.lastKey <= lastKey) {
            curGap.lastKey = lastKey;
            curGap.weight = remainingWeight;
        }

        // Step 4/4: Find the closest right-overlapping gap, and if it exists, either
        // merge it or chain it with curGap depending on the weights.
        if (nextGap && nextGap.firstKey <= lastKey) {
            // nextGap overlaps with the new gap: check if we can merge it
            minMergedWeight += nextGap.weight;
            let mergedWeight;
            if (lastKey === nextGap.firstKey) {
                // nextGap is chained with curGap: add the full weight of nextGap
                mergedWeight = curGap.weight + nextGap.weight;
            } else {
                // strict overlap: don't add nextGap's weight unless
                // it's larger than the sum of merged ranges (as it is
                // then included in 'minMergedWeight')
                mergedWeight = Math.max(curGap.weight, minMergedWeight);
            }
            if (mergedWeight <= this._maxWeight) {
                // merge nextGap into curGap
                curGap.lastKey = nextGap.lastKey;
                curGap.weight = mergedWeight;
                this._gaps.eraseElementByKey(nextGap);
            } else {
                // adjust the last key to chain with nextGap and subtract the next
                // gap's weight from curGap (heuristic)
                curGap.lastKey = nextGap.firstKey;
                curGap.weight = Math.max(mergedWeight - nextGap.weight, 0);
                curGap = nextGap;
            }
        }
        // return a copy of curGap
        return Object.assign({}, curGap);
    }

    /**
     * Remove gaps that overlap with one or more keys in a given array or
     * OrderedSet. Used to invalidate gaps when keys are inserted or deleted.
     *
     * @param {OrderedSet<string> | string[]} overlappingKeys - remove gaps that overlap
     *   with any of this set of keys
     * @return {number} - how many gaps were removed
     */
    removeOverlappingGaps(overlappingKeys: OrderedSet<string> | string[]): number {
        // To optimize processing with a large number of keys and/or gaps, this function:
        //
        // 1. converts the overlappingKeys array to an OrderedSet (if not already one)
        // 2. queries both the gaps set and the overlapping keys set in a loop, which allows:
        //    - skipping ranges of overlapping keys at once when there is no new overlapping gap
        //    - skipping ranges of gaps at once when there is no overlapping key
        //
        // This way, it is efficient when the number of non-overlapping gaps is large
        // (which is the most common case in practice).

        let overlappingKeysSet;
        if (Array.isArray(overlappingKeys)) {
            overlappingKeysSet = new OrderedSet(overlappingKeys);
        } else {
            overlappingKeysSet = overlappingKeys;
        }
        const firstKeyIt = overlappingKeysSet.begin();
        let currentKey = firstKeyIt.isAccessible() && firstKeyIt.pointer;
        let nRemoved = 0;
        while (currentKey) {
            const closestGapIt = this._gaps.reverseUpperBound(<GapSetEntry>{ firstKey: currentKey });
            if (closestGapIt.isAccessible()) {
                const closestGap = closestGapIt.pointer;
                if (currentKey <= closestGap.lastKey) {
                    // currentKey overlaps closestGap: remove the gap
                    this._gaps.eraseElementByIterator(closestGapIt);
                    nRemoved += 1;
                }
            }
            const nextGapIt = this._gaps.lowerBound(<GapSetEntry>{ firstKey: currentKey });
            if (!nextGapIt.isAccessible()) {
                // no more gap: we're done
                return nRemoved;
            }
            const nextGap = nextGapIt.pointer;
            // advance to the last key potentially overlapping with nextGap
            let currentKeyIt = overlappingKeysSet.reverseLowerBound(nextGap.lastKey);
            if (currentKeyIt.isAccessible()) {
                currentKey = currentKeyIt.pointer;
                if (currentKey >= nextGap.firstKey) {
                    // currentKey overlaps nextGap: remove the gap
                    this._gaps.eraseElementByIterator(nextGapIt);
                    nRemoved += 1;
                }
            }
            // advance to the first key potentially overlapping with another gap
            currentKeyIt = overlappingKeysSet.lowerBound(nextGap.lastKey);
            currentKey = currentKeyIt.isAccessible() && currentKeyIt.pointer;
        }
        return nRemoved;
    }

    /**
     * Internal helper to coalesce multiple chained gaps into a single gap.
     *
     * It is only used to construct lookupGap() return values and
     * doesn't modify the GapSet.
     *
     * NOTE: the function may take a noticeable amount of time and CPU
     * to execute if a large number of chained gaps have to be
     * coalesced, but it should never take more than a few seconds. In
     * most cases it should take less than a millisecond. It regularly
     * yields to the Node.js event loop to avoid blocking it during a
     * long execution.
     *
     * @param {GapSetEntry} firstGap - first gap of the chain to coalesce with
     *   the next ones in the chain
     * @return {Promise<GapSetEntry>} - a new coalesced entry, as a Promise
     */
    _coalesceGapChain(firstGap: GapSetEntry): Promise<GapSetEntry> {
        return new Promise(resolve => {
            const coalescedGap: GapSetEntry = Object.assign({}, firstGap);
            const coalesceGapChainIteration = () => {
                // efficiency trade-off: 100 iterations of log(N) complexity lookups should
                // not block the event loop for too long
                for (let opCounter = 0; opCounter < 100; ++opCounter) {
                    const chainedGapIt = this._gaps.find(
                        <GapSetEntry>{ firstKey: coalescedGap.lastKey });
                    if (!chainedGapIt.isAccessible()) {
                        // chain is complete
                        return resolve(coalescedGap);
                    }
                    const chainedGap = chainedGapIt.pointer;
                    if (chainedGap.firstKey === chainedGap.lastKey) {
                        // found a single-key gap: chain is complete
                        return resolve(coalescedGap);
                    }
                    coalescedGap.lastKey = chainedGap.lastKey;
                    coalescedGap.weight += chainedGap.weight;
                }
                // yield to the event loop before continuing the process
                // of coalescing the gap chain
                return process.nextTick(coalesceGapChainIteration);
            };
            coalesceGapChainIteration();
        });
    }

    /**
     * Lookup the next gap that overlaps with [minKey, maxKey]. Internally chained
     * gaps are coalesced in the response into a single contiguous large gap.
     *
     * @param {string} minKey - minimum key overlapping with the returned gap
     * @param {string} [maxKey] - maximum key overlapping with the returned gap
     * @return {Promise<GapSetEntry | null>} - result of the lookup if a gap
     *   was found, null otherwise, as a Promise
     */
    async lookupGap(minKey: string, maxKey?: string): Promise<GapSetEntry | null> {
        let firstGap: GapSetEntry | null = null;
        const minGapIt = this._gaps.reverseLowerBound(<GapSetEntry>{ firstKey: minKey });
        const minGap = minGapIt.isAccessible() && minGapIt.pointer;
        if (minGap && minGap.lastKey >= minKey) {
            firstGap = minGap;
        } else {
            const maxGapIt = this._gaps.upperBound(<GapSetEntry>{ firstKey: minKey });
            const maxGap = maxGapIt.isAccessible() && maxGapIt.pointer;
            if (maxGap && (maxKey === undefined || maxGap.firstKey <= maxKey)) {
                firstGap = maxGap;
            }
        }
        if (!firstGap) {
            return null;
        }
        return this._coalesceGapChain(firstGap);
    }

    /**
     * Get the maximum weight setting for individual gaps.
     *
     * @return {number} - maximum weight of individual gaps
     */
    get maxWeight(): number {
        return this._maxWeight;
    }

    /**
     * Set the maximum weight setting for individual gaps.
     *
     * @param {number} gapWeight - maximum weight of individual gaps
     */
    set maxWeight(gapWeight: number) {
        this._maxWeight = gapWeight;
    }

    /**
     * Get the number of gaps stored in this set.
     *
     * @return {number} - number of gaps stored in this set
     */
    get size(): number {
        return this._gaps.size();
    }

    /**
     * Iterate over each gap of the set, ordered by first key
     *
     * Example:
     *     for (const gap of myGapSet) { ... }
     *
     * @return {Iterator<GapSetEntry>} - an iterator over all gaps
     */
    [Symbol.iterator](): Iterator<GapSetEntry> {
        return this._gaps[Symbol.iterator]();
    }

    /**
     * Return an array containing all gaps, ordered by first key
     *
     * NOTE: there is a toArray() method in the OrderedSet implementation
     * but it does not scale well and overflows the stack quickly. This is
     * why we provide an implementation based on an iterator.
     *
     * @return {GapSetEntry[]} - an array containing all gaps
     */
    toArray(): GapSetEntry[] {
        return [...this];
    }
}
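
// A short worked sketch of the chaining behavior implemented by setGap()
// above (maxWeight and keys are made up for illustration):
//
//     const gapSet = new GapSet(10);
//     gapSet.setGap('a', 'f', 8);  // stored as { a -> f, weight 8 }
//     gapSet.setGap('f', 'k', 7);  // 8 + 7 > 10: chained as { f -> k, weight 7 }
//     // lookupGap() coalesces the chain into a single response:
//     gapSet.lookupGap('c').then(gap => {
//         // gap -> { firstKey: 'a', lastKey: 'k', weight: 15 }
//     });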
@ -1,167 +0,0 @@
const assert = require('assert');

/**
 * @class
 * @classdesc Implements a key-value in-memory cache with a capped
 * number of items and a Least Recently Used (LRU) strategy for
 * eviction.
 */
class LRUCache {
    /**
     * @constructor
     * @param {number} maxEntries - maximum number of entries kept in
     * the cache
     */
    constructor(maxEntries) {
        assert(maxEntries >= 1);
        this._maxEntries = maxEntries;
        this.clear();
    }

    /**
     * Add or update the value associated to a key in the cache,
     * making it the most recently accessed for eviction purposes.
     *
     * @param {string} key - key to add
     * @param {object} value - associated value (can be of any type)
     * @return {boolean} true if the cache contained an entry with
     * this key, false if it did not
     */
    add(key, value) {
        let entry = this._entryMap[key];
        if (entry) {
            entry.value = value;
            // make the entry the most recently used by re-pushing it
            // to the head of the LRU list
            this._lruRemoveEntry(entry);
            this._lruPushEntry(entry);
            return true;
        }
        if (this._entryCount === this._maxEntries) {
            // if the cache is already full, abide by the LRU strategy
            // and remove the least recently used entry from the cache
            // before pushing the new entry
            this._removeEntry(this._lruTail);
        }
        entry = { key, value };
        this._entryMap[key] = entry;
        this._entryCount += 1;
        this._lruPushEntry(entry);
        return false;
    }

    /**
     * Get the value associated to a key in the cache, making it the
     * most recently accessed for eviction purposes.
     *
     * @param {string} key - key of which to fetch the associated value
     * @return {object|undefined} - returns the associated value if it
     * exists in the cache, or undefined if not found - either if the
     * key was never added or if it has been evicted from the cache.
     */
    get(key) {
        const entry = this._entryMap[key];
        if (entry) {
            // make the entry the most recently used by re-pushing it
            // to the head of the LRU list
            this._lruRemoveEntry(entry);
            this._lruPushEntry(entry);
            return entry.value;
        }
        return undefined;
    }

    /**
     * Remove an entry from the cache if it exists
     *
     * @param {string} key - key to remove
     * @return {boolean} true if an entry has been removed, false if
     * there was no entry with this key in the cache - either if the
     * key was never added or if it has been evicted from the cache.
     */
    remove(key) {
        const entry = this._entryMap[key];
        if (entry) {
            this._removeEntry(entry);
            return true;
        }
        return false;
    }

    /**
     * Get the current number of cached entries
     *
     * @return {number} current number of cached entries
     */
    count() {
        return this._entryCount;
    }

    /**
     * Remove all entries from the cache
     *
     * @return {undefined}
     */
    clear() {
        this._entryMap = {};
        this._entryCount = 0;
        this._lruHead = null;
        this._lruTail = null;
    }

    /**
     * Push an entry to the front of the LRU list, making it the most
     * recently accessed
     *
     * @param {object} entry - entry to push
     * @return {undefined}
     */
    _lruPushEntry(entry) {
        /* eslint-disable no-param-reassign */
        entry._lruNext = this._lruHead;
        entry._lruPrev = null;
        if (this._lruHead) {
            this._lruHead._lruPrev = entry;
        }
        this._lruHead = entry;
        if (!this._lruTail) {
            this._lruTail = entry;
        }
        /* eslint-enable no-param-reassign */
    }

    /**
     * Remove an entry from the LRU list
     *
     * @param {object} entry - entry to remove
     * @return {undefined}
     */
    _lruRemoveEntry(entry) {
        /* eslint-disable no-param-reassign */
        if (entry._lruPrev) {
            entry._lruPrev._lruNext = entry._lruNext;
        } else {
            this._lruHead = entry._lruNext;
        }
        if (entry._lruNext) {
            entry._lruNext._lruPrev = entry._lruPrev;
        } else {
            this._lruTail = entry._lruPrev;
        }
        /* eslint-enable no-param-reassign */
    }

    /**
     * Helper function to remove an existing entry from the cache
     *
     * @param {object} entry - cache entry to remove
     * @return {undefined}
     */
    _removeEntry(entry) {
        this._lruRemoveEntry(entry);
        delete this._entryMap[entry.key];
        this._entryCount -= 1;
    }
}

module.exports = LRUCache;
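
// A minimal usage sketch (hypothetical keys and values):
//
//     const cache = new LRUCache(2);
//     cache.add('a', 1);  // -> false (new entry)
//     cache.add('b', 2);  // -> false
//     cache.get('a');     // -> 1, and 'a' becomes most recently used
//     cache.add('c', 3);  // evicts 'b', the least recently used entry
//     cache.get('b');     // -> undefined
//     cache.count();      // -> 2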
@ -1,124 +0,0 @@
export enum HeapOrder {
    Min = -1,
    Max = 1,
}

export enum CompareResult {
    LT = -1,
    EQ = 0,
    GT = 1,
}

export type CompareFunction = (x: any, y: any) => CompareResult;

/**
 * Array-backed binary heap with a fixed maximum size, ordered as a
 * min- or max-heap depending on the HeapOrder given at construction.
 */
export class Heap {
    size: number;
    _maxSize: number;
    _order: HeapOrder;
    _heap: any[];
    _cmpFn: CompareFunction;

    constructor(size: number, order: HeapOrder, cmpFn: CompareFunction) {
        this.size = 0;
        this._maxSize = size;
        this._order = order;
        this._cmpFn = cmpFn;
        this._heap = new Array<any>(this._maxSize);
    }

    _parent(i: number): number {
        return Math.floor((i - 1) / 2);
    }

    _left(i: number): number {
        return Math.floor((2 * i) + 1);
    }

    _right(i: number): number {
        return Math.floor((2 * i) + 2);
    }

    // a child and its parent are swapped when the comparison result
    // matches the heap order (LT for a min-heap, GT for a max-heap)
    _shouldSwap(childIdx: number, parentIdx: number): boolean {
        return this._cmpFn(this._heap[childIdx], this._heap[parentIdx]) as number === this._order as number;
    }

    _swap(i: number, j: number) {
        const tmp = this._heap[i];
        this._heap[i] = this._heap[j];
        this._heap[j] = tmp;
    }

    // sift the element at index i down until the heap property is restored
    _heapify(i: number) {
        const l = this._left(i);
        const r = this._right(i);
        let c = i;

        if (l < this.size && this._shouldSwap(l, c)) {
            c = l;
        }

        if (r < this.size && this._shouldSwap(r, c)) {
            c = r;
        }

        if (c !== i) {
            this._swap(c, i);
            this._heapify(c);
        }
    }

    /**
     * Insert an item, sifting it up to its position.
     *
     * @return null on success, an Error if the heap is full
     */
    add(item: any): any {
        if (this.size >= this._maxSize) {
            return new Error('Max heap size reached');
        }

        ++this.size;
        let c = this.size - 1;
        this._heap[c] = item;

        while (c > 0) {
            if (!this._shouldSwap(c, this._parent(c))) {
                return null;
            }

            this._swap(c, this._parent(c));
            c = this._parent(c);
        }

        return null;
    }

    /**
     * Remove and return the root item (smallest for a min-heap,
     * largest for a max-heap), or null if the heap is empty.
     */
    remove(): any {
        if (this.size <= 0) {
            return null;
        }

        const ret = this._heap[0];
        this._heap[0] = this._heap[this.size - 1];
        this._heapify(0);
        --this.size;

        return ret;
    }

    /**
     * Return the root item without removing it, or null if the heap
     * is empty.
     */
    peek(): any {
        if (this.size <= 0) {
            return null;
        }

        return this._heap[0];
    }
}

export class MinHeap extends Heap {
    constructor(size: number, cmpFn: CompareFunction) {
        super(size, HeapOrder.Min, cmpFn);
    }
}

export class MaxHeap extends Heap {
    constructor(size: number, cmpFn: CompareFunction) {
        super(size, HeapOrder.Max, cmpFn);
    }
}
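
// A minimal usage sketch (the comparison function over numbers is made up
// for illustration):
//
//     const cmp = (x: any, y: any) => (
//         x < y ? CompareResult.LT : x > y ? CompareResult.GT : CompareResult.EQ);
//     const heap = new MinHeap(10, cmp);
//     heap.add(3);
//     heap.add(1);
//     heap.add(2);
//     heap.peek();    // -> 1
//     heap.remove();  // -> 1, after which peek() returns 2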
@ -1,129 +0,0 @@
'use strict'; // eslint-disable-line strict

const { FILTER_ACCEPT, SKIP_NONE } = require('./tools');

// Use a heuristic to amortize the cost of JSON
// serialization/deserialization only on the largest metadata, where the
// potential for size reduction is high, considering the bulk of the
// blob size is due to the "location" field containing a large number
// of MPU parts.
//
// Measured on some standard metadata:
// - 100 parts -> 9K blob
// - 2000 parts -> 170K blob
//
// Using a 10K threshold should lead to a worst case of about 10M to
// store a raw listing of 1000 entries; even with some growth
// multiplication factor due to internal memory duplication, it
// should stay within reasonable memory limits.

const TRIM_METADATA_MIN_BLOB_SIZE = 10000;

/**
 * Base class of listing extensions.
 */
class Extension {
    /**
     * This takes a list of parameters and a logger as the inputs.
     * Derivatives should have their own format regarding parameters.
     *
     * @param {Object} parameters - listing parameters from applications
     * @param {RequestLogger} logger - the logger
     * @constructor
     */
    constructor(parameters, logger) {
        // inputs
        this.parameters = parameters;
        this.logger = logger;
        // listing results
        this.res = undefined;
        this.keys = 0;
    }

    /**
     * Filters out non-requested optional fields from the value. This function
     * shall be applied on any value that is to be returned as part of the
     * result of a listing extension.
     *
     * @param {String} value - The JSON value of a listing item
     *
     * @return {String} The value that may have been trimmed of some
     * heavy unused fields, or left untouched (depending on size
     * heuristics)
     */
    trimMetadata(value) {
        let ret = undefined;
        if (value.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
            try {
                ret = JSON.parse(value);
                delete ret.location;
                ret = JSON.stringify(ret);
            } catch (e) {
                // Prefer returning unfiltered data rather than
                // stopping the service in case of parsing failure.
                // The risk of this approach is a potential
                // reproduction of MD-692, where too much memory is
                // used by repd.
                this.logger.warn(
                    'Could not parse Object Metadata while listing',
                    { err: e.toString() });
            }
        }
        return ret || value;
    }

    /**
     * Generates listing parameters that metadata can understand from the input
     * parameters. What metadata can understand: gt, gte, lt, lte, limit, keys,
     * values, reverse; we use the same set of parameters as levelup's.
     * Derivatives should have their own conversion of their original listing
     * parameters into metadata listing parameters.
     *
     * @return {object} - listing parameters for metadata
     */
    genMDParams() {
        return {};
    }

    /**
     * This function receives a data entry from metadata and decides if it will
     * include the entry in the listing result or not.
     *
     * @param {object} entry - a listing entry from metadata
     *                         expected format: { key, value }
     * @return {number} - result of filtering the entry:
     *                    FILTER_ACCEPT: entry is accepted and may or may not
     *                    be included in the result
     *                    FILTER_SKIP: listing may skip directly (with "gte"
     *                    param) to the key returned by the skipping() method
     *                    FILTER_END: the results are complete, listing can
     *                    be stopped
     */
    filter(/* entry: { key, value } */) {
        return FILTER_ACCEPT;
    }

    /**
     * Provides the next key at which the listing task is allowed to skip to.
     * This allows skipping over:
     * - a key prefix ending with the delimiter
     * - all remaining versions of an object when doing a current
     *   versions listing in v0 format
     * - a cached "gap" of deleted objects when doing a current
     *   versions listing in v0 format
     *
     * @return {string} - the next key at which the listing task is allowed to skip to
     */
    skipping() {
        return SKIP_NONE;
    }

    /**
     * Get the listing results. Format depends on derivatives' specific logic.
     * @return {Array} - The listed elements
     */
    result() {
        return this.res;
    }
}

module.exports.default = Extension;
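
// A minimal sketch of a derived extension (hypothetical), showing the
// contract described above: genMDParams() narrows the range queried from
// metadata and filter() accumulates entries into 'this.res' ('inc' would
// come from './tools', as in the other extensions of this directory):
//
//     class PrefixList extends Extension {
//         constructor(parameters, logger) {
//             super(parameters, logger);
//             this.res = [];
//         }
//         genMDParams() {
//             return {
//                 gte: this.parameters.prefix,
//                 lt: inc(this.parameters.prefix),
//             };
//         }
//         filter(entry) {
//             this.res.push({
//                 key: entry.key,
//                 value: this.trimMetadata(entry.value),
//             });
//             return FILTER_ACCEPT;
//         }
//     }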
@ -1,188 +0,0 @@
'use strict'; // eslint-disable-line strict

const { inc, checkLimit, listingParamsMasterKeysV0ToV1,
    FILTER_END, FILTER_ACCEPT, SKIP_NONE } = require('./tools');
const DEFAULT_MAX_KEYS = 1000;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

function numberDefault(num, defaultNum) {
    const parsedNum = Number.parseInt(num, 10);
    return Number.isNaN(parsedNum) ? defaultNum : parsedNum;
}

/**
 * Class for the MultipartUploads extension
 */
class MultipartUploads {
    /**
     * Constructor of the extension
     * Init and check parameters
     * @param {Object} params - The parameters sent to DBD
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     * @return {undefined}
     */
    constructor(params, logger, vFormat) {
        this.params = params;
        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
        this.CommonPrefixes = [];
        this.Uploads = [];
        this.IsTruncated = false;
        this.NextKeyMarker = '';
        this.NextUploadIdMarker = '';
        this.prefixLength = 0;
        this.queryPrefixLength = numberDefault(params.queryPrefixLength, 0);
        this.keys = 0;
        this.maxKeys = checkLimit(params.maxKeys, DEFAULT_MAX_KEYS);
        this.delimiter = params.delimiter;
        this.splitter = params.splitter;
        this.logger = logger;

        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: {
                genMDParams: this.genMDParamsV0,
                getObjectKey: this.getObjectKeyV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                genMDParams: this.genMDParamsV1,
                getObjectKey: this.getObjectKeyV1,
            },
        }[this.vFormat]);
    }

    genMDParamsV0() {
        const params = {};
        if (this.params.keyMarker) {
            params.gt = `overview${this.params.splitter}` +
                `${this.params.keyMarker}${this.params.splitter}`;
            if (this.params.uploadIdMarker) {
                params.gt += `${this.params.uploadIdMarker}`;
            }
            // advance so that the lower bound does not include the supplied
            // markers
            params.gt = inc(params.gt);
        }
        if (this.params.prefix) {
            if (params.gt === undefined || this.params.prefix > params.gt) {
                delete params.gt;
                params.gte = this.params.prefix;
            }
            params.lt = inc(this.params.prefix);
        }
        return params;
    }

    genMDParamsV1() {
        const v0params = this.genMDParamsV0();
        return listingParamsMasterKeysV0ToV1(v0params);
    }

    /**
     * This function adds an upload to the Uploads array,
     * sets the NextKeyMarker to the current key
     * and increments the keys counter
     * @param {String} value - The value of the key
     * @return {undefined}
     */
    addUpload(value) {
        const tmp = JSON.parse(value);
        this.Uploads.push({
            key: tmp.key,
            value: {
                UploadId: tmp.uploadId,
                Initiator: {
                    ID: tmp.initiator.ID,
                    DisplayName: tmp.initiator.DisplayName,
                },
                Owner: {
                    ID: tmp['owner-id'],
                    DisplayName: tmp['owner-display-name'],
                },
                StorageClass: tmp['x-amz-storage-class'],
                Initiated: tmp.initiated,
            },
        });
        this.NextKeyMarker = tmp.key;
        this.NextUploadIdMarker = tmp.uploadId;
        ++this.keys;
    }
    /**
     * This function adds a common prefix to the CommonPrefixes array,
     * sets the NextKeyMarker to the current commonPrefix
     * and increments the keys counter
     * @param {String} commonPrefix - The commonPrefix to add
     * @return {undefined}
     */
    addCommonPrefix(commonPrefix) {
        if (this.CommonPrefixes.indexOf(commonPrefix) === -1) {
            this.CommonPrefixes.push(commonPrefix);
            this.NextKeyMarker = commonPrefix;
            ++this.keys;
        }
    }

    getObjectKeyV0(obj) {
        return obj.key;
    }

    getObjectKeyV1(obj) {
        return obj.key.slice(DbPrefixes.Master.length);
    }

    /**
     * This function applies the filter on each element
     * @param {Object} obj - The key and value of the element
     * @return {number} - > 0: Continue, < 0: Stop
     */
    filter(obj) {
        // Check first in case of maxKeys = 0
        if (this.keys >= this.maxKeys) {
            // In cases of maxKeys <= 0 => IsTruncated = false
            this.IsTruncated = this.maxKeys > 0;
            return FILTER_END;
        }
        const key = this.getObjectKey(obj);
        const value = obj.value;
        if (this.delimiter) {
            const mpuPrefixSlice = `overview${this.splitter}`.length;
            const mpuKey = key.slice(mpuPrefixSlice);
            const commonPrefixIndex = mpuKey.indexOf(this.delimiter,
                this.queryPrefixLength);

            if (commonPrefixIndex === -1) {
                this.addUpload(value);
            } else {
                this.addCommonPrefix(mpuKey.substring(0,
                    commonPrefixIndex + this.delimiter.length));
            }
        } else {
            this.addUpload(value);
        }
        return FILTER_ACCEPT;
    }

    skipping() {
        return SKIP_NONE;
    }

    /**
     * Returns the formatted result
     * @return {Object} - The result.
     */
    result() {
        return {
            CommonPrefixes: this.CommonPrefixes,
            Uploads: this.Uploads,
            IsTruncated: this.IsTruncated,
            NextKeyMarker: this.NextKeyMarker,
            MaxKeys: this.maxKeys,
            NextUploadIdMarker: this.NextUploadIdMarker,
            Delimiter: this.delimiter,
        };
    }
}

module.exports = {
    MultipartUploads,
};
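
// A short sketch of the range computed by genMDParamsV0() above
// (hypothetical splitter and marker values):
//
//     const mpu = new MultipartUploads({
//         splitter: '..|..',
//         keyMarker: 'foo',
//         uploadIdMarker: 'id1',
//     }, logger);
//     mpu.genMDParamsV0();
//     // -> { gt: inc('overview..|..foo..|..id1') }
//     // i.e. start listing strictly after the supplied markers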
@ -1,127 +0,0 @@
|
||||||
'use strict'; // eslint-disable-line strict

const Extension = require('./Extension').default;

const { checkLimit, FILTER_END, FILTER_ACCEPT } = require('./tools');
const DEFAULT_MAX_KEYS = 10000;

/**
 * Class of an extension performing a simple listing
 */
class List extends Extension {
    /**
     * Constructor
     * Sets the logger and the result array
     * @param {Object} parameters - The parameters you sent to DBD
     * @param {RequestLogger} logger - The logger of the request
     * @return {undefined}
     */
    constructor(parameters, logger) {
        super(parameters, logger);
        this.res = [];
        if (parameters) {
            this.maxKeys = checkLimit(parameters.maxKeys, DEFAULT_MAX_KEYS);
            this.filterKey = parameters.filterKey;
            this.filterKeyStartsWith = parameters.filterKeyStartsWith;
        } else {
            this.maxKeys = DEFAULT_MAX_KEYS;
        }
        this.keys = 0;
    }

    genMDParams() {
        const params = this.parameters ? {
            gt: this.parameters.gt,
            gte: this.parameters.gte || this.parameters.start,
            lt: this.parameters.lt,
            lte: this.parameters.lte || this.parameters.end,
            keys: this.parameters.keys,
            values: this.parameters.values,
        } : {};
        Object.keys(params).forEach(key => {
            if (params[key] === null || params[key] === undefined) {
                delete params[key];
            }
        });
        return params;
    }

    /**
     * Filters on the customAttributes sub-object if present
     *
     * @param {String} value - The JSON value of a listing item
     *
     * @return {Boolean} Returns true if it matches, else false.
     */
    customFilter(value) {
        let _value;
        try {
            _value = JSON.parse(value);
        } catch (e) {
            // Prefer returning unfiltered data rather than
            // stopping the service in case of parsing failure.
            // The risk of this approach is a potential
            // reproduction of MD-692, where too much memory is
            // used by repd.
            this.logger.warn(
                'Could not parse Object Metadata while listing',
                { err: e.toString() });
            return false;
        }
        if (_value.customAttributes !== undefined) {
            for (const key of Object.keys(_value.customAttributes)) {
                if (this.filterKey !== undefined &&
                    key === this.filterKey) {
                    return true;
                }
                if (this.filterKeyStartsWith !== undefined &&
                    key.startsWith(this.filterKeyStartsWith)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Function applied on each element: simply adds it to the result array
     * @param {Object} elem - The data from the database
     * @return {number} - > 0 : continue listing
     *                    < 0 : listing done
     */
    filter(elem) {
        // Check if the result array is full
        if (this.keys >= this.maxKeys) {
            return FILTER_END;
        }
        if ((this.filterKey !== undefined ||
            this.filterKeyStartsWith !== undefined) &&
            typeof elem === 'object' &&
            !this.customFilter(elem.value)) {
            return FILTER_ACCEPT;
        }
        if (typeof elem === 'object') {
            this.res.push({
                key: elem.key,
                value: this.trimMetadata(elem.value),
            });
        } else {
            this.res.push(elem);
        }
        this.keys++;
        return FILTER_ACCEPT;
    }

    /**
     * Function returning the result
     * @return {Array} - The listed elements
     */
    result() {
        return this.res;
    }
}

module.exports = {
    List,
};
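
A minimal sketch of how the custom-attribute filter behaves (not part of the original file; `logger` and the entries are hypothetical):

    const listing = new List({ maxKeys: 10, filterKeyStartsWith: 'x-custom-' }, logger);
    const matching = JSON.stringify({ customAttributes: { 'x-custom-owner': 'alice' } });
    listing.filter({ key: 'obj1', value: matching }); // pushed to the result array
    listing.filter({ key: 'obj2', value: '{}' });     // no matching attribute: not pushed,
                                                      // but listing continues (FILTER_ACCEPT)
    listing.result(); // => [{ key: 'obj1', value: ... }]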
@@ -1,356 +0,0 @@
'use strict'; // eslint-disable-line strict

const Extension = require('./Extension').default;
const { inc, listingParamsMasterKeysV0ToV1,
    FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools');
const VSConst = require('../../versioning/constants').VersioningConstants;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

export interface FilterState {
    id: number,
};

export interface FilterReturnValue {
    FILTER_ACCEPT,
    FILTER_SKIP,
    FILTER_END,
};

export const enum DelimiterFilterStateId {
    NotSkipping = 1,
    SkippingPrefix = 2,
};

export interface DelimiterFilterState_NotSkipping extends FilterState {
    id: DelimiterFilterStateId.NotSkipping,
};

export interface DelimiterFilterState_SkippingPrefix extends FilterState {
    id: DelimiterFilterStateId.SkippingPrefix,
    prefix: string;
};

type KeyHandler = (key: string, value: string) => FilterReturnValue;

export type ResultObject = {
    CommonPrefixes: string[];
    Contents: {
        key: string;
        value: string;
    }[];
    IsTruncated: boolean;
    Delimiter ?: string;
    NextMarker ?: string;
    NextContinuationToken ?: string;
};

/**
 * Handle object listing with parameters
 *
 * @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
 * @prop {String[]} Contents - 'files' to list
 * @prop {Boolean} IsTruncated - truncated listing flag
 * @prop {String|undefined} NextMarker - marker per amazon format
 * @prop {Number} keys - count of listed keys
 * @prop {String|undefined} delimiter - separator per amazon format
 * @prop {String|undefined} prefix - prefix per amazon format
 * @prop {Number} maxKeys - number of keys to list
 */
export class Delimiter extends Extension {

    state: FilterState;
    keyHandlers: { [id: number]: KeyHandler };

    /**
     * Create a new Delimiter instance
     * @constructor
     * @param {Object} parameters - listing parameters
     * @param {String} [parameters.delimiter] - delimiter per amazon format
     * @param {String} [parameters.prefix] - prefix per amazon format
     * @param {String} [parameters.marker] - marker per amazon format
     * @param {Number} [parameters.maxKeys] - number of keys to list
     * @param {Boolean} [parameters.v2] - indicates whether the v2 listing format is used
     * @param {String} [parameters.startAfter] - marker per amazon v2 format
     * @param {String} [parameters.continuationToken] - obfuscated amazon token
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger);
        // original listing parameters
        this.delimiter = parameters.delimiter;
        this.prefix = parameters.prefix;
        this.maxKeys = parameters.maxKeys || 1000;

        if (parameters.v2) {
            this.marker = parameters.continuationToken || parameters.startAfter;
        } else {
            this.marker = parameters.marker;
        }
        this.nextMarker = this.marker;

        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
        // results
        this.CommonPrefixes = [];
        this.Contents = [];
        this.IsTruncated = false;
        this.keyHandlers = {};

        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: {
                genMDParams: this.genMDParamsV0,
                getObjectKey: this.getObjectKeyV0,
                skipping: this.skippingV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                genMDParams: this.genMDParamsV1,
                getObjectKey: this.getObjectKeyV1,
                skipping: this.skippingV1,
            },
        }[this.vFormat]);

        // if there is a delimiter, we may skip ranges by prefix,
        // hence using the NotSkippingPrefix flavor that checks the
        // subprefix up to the delimiter for the NotSkipping state
        if (this.delimiter) {
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingPrefix.bind(this));
        } else {
            // listing without a delimiter never has to skip over any
            // prefix -> use NeverSkipping flavor for the NotSkipping
            // state
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NeverSkipping.bind(this));
        }
        this.setKeyHandler(
            DelimiterFilterStateId.SkippingPrefix,
            this.keyHandler_SkippingPrefix.bind(this));

        this.state = <DelimiterFilterState_NotSkipping> {
            id: DelimiterFilterStateId.NotSkipping,
        };
    }

    genMDParamsV0() {
        const params: { gt ?: string, gte ?: string, lt ?: string } = {};
        if (this.prefix) {
            params.gte = this.prefix;
            params.lt = inc(this.prefix);
        }
        if (this.marker && this.delimiter) {
            const commonPrefix = this.getCommonPrefix(this.marker);
            if (commonPrefix) {
                const afterPrefix = inc(commonPrefix);
                if (!params.gte || afterPrefix > params.gte) {
                    params.gte = afterPrefix;
                }
            }
        }
        if (this.marker && (!params.gte || this.marker >= params.gte)) {
            delete params.gte;
            params.gt = this.marker;
        }
        return params;
    }
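
    // Illustration (not in the original source), assuming inc() increments
    // the last character of its argument: with
    // { prefix: 'photos/', marker: 'photos/2021/a' } and no delimiter, the
    // params come out as { gt: 'photos/2021/a', lt: 'photos0' }: the marker
    // replaces the prefix as lower bound and 'lt' caps the range at the end
    // of the prefix.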

    genMDParamsV1() {
        const params = this.genMDParamsV0();
        return listingParamsMasterKeysV0ToV1(params);
    }

    /**
     * check if the max keys count has been reached and set the
     * final state of the result if it is the case
     * @return {Boolean} - indicates if the iteration has to stop
     */
    _reachedMaxKeys(): boolean {
        if (this.keys >= this.maxKeys) {
            // In cases of maxKeys <= 0 -> IsTruncated = false
            this.IsTruncated = this.maxKeys > 0;
            return true;
        }
        return false;
    }

    /**
     * Add a (key, value) tuple to the listing
     * Set the NextMarker to the current key
     * Increment the keys counter
     * @param {String} key - The key to add
     * @param {String} value - The value of the key
     * @return {undefined}
     */
    addContents(key: string, value: string): void {
        this.Contents.push({ key, value: this.trimMetadata(value) });
        ++this.keys;
        this.nextMarker = key;
    }

    getCommonPrefix(key: string): string | undefined {
        if (!this.delimiter) {
            return undefined;
        }
        const baseIndex = this.prefix ? this.prefix.length : 0;
        const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
        if (delimiterIndex === -1) {
            return undefined;
        }
        return key.substring(0, delimiterIndex + this.delimiter.length);
    }

    /**
     * Add a Common Prefix to the list
     * @param {String} commonPrefix - common prefix to add
     * @param {String} key - full key starting with commonPrefix
     * @return {undefined}
     */
    addCommonPrefix(commonPrefix: string, key: string): void {
        // add the new prefix to the list
        this.CommonPrefixes.push(commonPrefix);
        ++this.keys;
        this.nextMarker = commonPrefix;
    }

    addCommonPrefixOrContents(key: string, value: string): string | undefined {
        // add the subprefix to the common prefixes if the key has the delimiter
        const commonPrefix = this.getCommonPrefix(key);
        if (commonPrefix) {
            this.addCommonPrefix(commonPrefix, key);
            return commonPrefix;
        }
        this.addContents(key, value);
        return undefined;
    }

    getObjectKeyV0(obj: { key: string }): string {
        return obj.key;
    }

    getObjectKeyV1(obj: { key: string }): string {
        return obj.key.slice(DbPrefixes.Master.length);
    }

    /**
     * Filter to apply on each iteration, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filter(obj: { key: string, value: string }): FilterReturnValue {
        const key = this.getObjectKey(obj);
        const value = obj.value;

        return this.handleKey(key, value);
    }

    setState(state: FilterState): void {
        this.state = state;
    }

    setKeyHandler(stateId: number, keyHandler: KeyHandler): void {
        this.keyHandlers[stateId] = keyHandler;
    }

    handleKey(key: string, value: string): FilterReturnValue {
        return this.keyHandlers[this.state.id](key, value);
    }

    keyHandler_NeverSkipping(key: string, value: string): FilterReturnValue {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        this.addContents(key, value);
        return FILTER_ACCEPT;
    }

    keyHandler_NotSkippingPrefix(key: string, value: string): FilterReturnValue {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        const commonPrefix = this.addCommonPrefixOrContents(key, value);
        if (commonPrefix) {
            // transition into SkippingPrefix state to skip all following keys
            // while they start with the same prefix
            this.setState(<DelimiterFilterState_SkippingPrefix> {
                id: DelimiterFilterStateId.SkippingPrefix,
                prefix: commonPrefix,
            });
        }
        return FILTER_ACCEPT;
    }

    keyHandler_SkippingPrefix(key: string, value: string): FilterReturnValue {
        const { prefix } = <DelimiterFilterState_SkippingPrefix> this.state;
        if (key.startsWith(prefix)) {
            return FILTER_SKIP;
        }
        this.setState(<DelimiterFilterState_NotSkipping> {
            id: DelimiterFilterStateId.NotSkipping,
        });
        return this.handleKey(key, value);
    }

    skippingBase(): string | undefined {
        switch (this.state.id) {
        case DelimiterFilterStateId.SkippingPrefix:
            const { prefix } = <DelimiterFilterState_SkippingPrefix> this.state;
            return inc(prefix);

        default:
            return SKIP_NONE;
        }
    }

    skippingV0() {
        return this.skippingBase();
    }

    skippingV1() {
        const skipTo = this.skippingBase();
        if (skipTo === SKIP_NONE) {
            return SKIP_NONE;
        }
        return DbPrefixes.Master + skipTo;
    }

    /**
     * Return an object containing all mandatory fields to use once the
     * iteration is done; doesn't include a NextMarker field if the output
     * isn't truncated
     * @return {Object} - following amazon format
     */
    result(): ResultObject {
        /* NextMarker is only provided when a delimiter is used,
         * as specified in the v1 listing documentation
         * http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
         */
        const result: ResultObject = {
            CommonPrefixes: this.CommonPrefixes,
            Contents: this.Contents,
            IsTruncated: this.IsTruncated,
            Delimiter: this.delimiter,
        };
        if (this.parameters.v2) {
            result.NextContinuationToken = this.IsTruncated
                ? this.nextMarker : undefined;
        } else {
            result.NextMarker = (this.IsTruncated && this.delimiter)
                ? this.nextMarker : undefined;
        }
        return result;
    }
}
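
A minimal sketch of the resulting state machine (not part of the original file; keys and `logger` are hypothetical, and the extension is normally driven by the metadata backend):

    const listing = new Delimiter({ delimiter: '/', maxKeys: 1000 }, logger);
    listing.filter({ key: 'a.txt', value: '{}' });      // FILTER_ACCEPT: added to Contents
    listing.filter({ key: 'docs/1.txt', value: '{}' }); // FILTER_ACCEPT: 'docs/' added to
                                                        // CommonPrefixes, state -> SkippingPrefix
    listing.filter({ key: 'docs/2.txt', value: '{}' }); // FILTER_SKIP: still under 'docs/'
    listing.skipping(); // 'docs0' with the assumed inc(): where the backend may resume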
@@ -1,127 +0,0 @@
const { DelimiterMaster } = require('./delimiterMaster');
const { FILTER_ACCEPT, FILTER_END } = require('./tools');

type ResultObject = {
    Contents: {
        key: string;
        value: string;
    }[];
    IsTruncated: boolean;
    NextMarker ?: string;
};

/**
 * Handle object listing with parameters. This extends the base class DelimiterMaster
 * to return the master/current versions.
 */
class DelimiterCurrent extends DelimiterMaster {
    /**
     * Delimiter listing of current versions.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.beforeDate - limit the response to keys older than beforeDate
     * @param {String} parameters.excludedDataStoreName - excluded datastore name
     * @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger, vFormat);

        this.beforeDate = parameters.beforeDate;
        this.excludedDataStoreName = parameters.excludedDataStoreName;
        this.maxScannedLifecycleListingEntries = parameters.maxScannedLifecycleListingEntries;
        this.scannedKeys = 0;
    }

    genMDParamsV0() {
        const params = super.genMDParamsV0();
        // The lastModified and dataStoreName parameters are used by metadata
        // backends that support built-in filtering, currently only MongoDB.
        if (this.beforeDate) {
            params.lastModified = {
                lt: this.beforeDate,
            };
        }

        if (this.excludedDataStoreName) {
            params.dataStoreName = {
                ne: this.excludedDataStoreName,
            };
        }

        return params;
    }

    /**
     * Parses the stringified entry's value.
     * @param s - stringified value
     * @return - undefined if parsing fails, otherwise the parsed value.
     */
    _parse(s) {
        let p;
        try {
            p = JSON.parse(s);
        } catch (e: any) {
            this.logger.warn(
                'Could not parse Object Metadata while listing',
                { err: e.toString() });
        }
        return p;
    }

    /**
     * Check if the max keys count has been reached and set the
     * final state of the result if it is the case.
     *
     * Specialized implementation on DelimiterCurrent to also check
     * the number of scanned keys.
     *
     * @return {Boolean} - indicates if the iteration has to stop
     */
    _reachedMaxKeys(): boolean {
        if (this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries) {
            this.IsTruncated = true;
            this.logger.info('listing stopped due to reaching the maximum scanned entries limit',
                {
                    maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
                    scannedKeys: this.scannedKeys,
                });
            return true;
        }
        return super._reachedMaxKeys();
    }

    addContents(key, value) {
        ++this.scannedKeys;
        const parsedValue = this._parse(value);
        // if parsing fails, skip the key.
        if (parsedValue) {
            const lastModified = parsedValue['last-modified'];
            const dataStoreName = parsedValue.dataStoreName;
            // We then check that the current version is older than the "beforeDate", and that
            // "excludedDataStoreName" is either not specified or differs from the data store name.
            if ((!this.beforeDate || (lastModified && lastModified < this.beforeDate)) &&
                (!this.excludedDataStoreName || dataStoreName !== this.excludedDataStoreName)) {
                super.addContents(key, value);
            }
            // In the event of a timeout occurring before any content is added,
            // NextMarker is updated even if the object is not eligible.
            // This minimizes the amount of data that the client needs to re-process if the request times out.
            this.nextMarker = key;
        }
    }

    result(): object {
        const result: ResultObject = {
            Contents: this.Contents,
            IsTruncated: this.IsTruncated,
        };

        if (this.IsTruncated) {
            result.NextMarker = this.nextMarker;
        }

        return result;
    }
}
module.exports = { DelimiterCurrent };
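
A minimal sketch of the eligibility check applied by addContents() (not part of the original file; the metadata blobs, location names and `logger` are hypothetical, and filter() normally dispatches here rather than calling it directly):

    const listing = new DelimiterCurrent({
        beforeDate: '2024-01-01T00:00:00.000Z',
        excludedDataStoreName: 'cold-site', // hypothetical location name
    }, logger);
    // pushed: last-modified precedes beforeDate and the data store differs
    listing.addContents('obj1', JSON.stringify(
        { 'last-modified': '2023-06-01T00:00:00.000Z', dataStoreName: 'us-east-1' }));
    // not pushed, but still counted in scannedKeys and reflected in nextMarker
    listing.addContents('obj2', JSON.stringify(
        { 'last-modified': '2024-06-01T00:00:00.000Z', dataStoreName: 'us-east-1' }));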
@@ -1,620 +0,0 @@
import {
    Delimiter,
    FilterState,
    FilterReturnValue,
    DelimiterFilterStateId,
    DelimiterFilterState_NotSkipping,
    DelimiterFilterState_SkippingPrefix,
    ResultObject,
} from './delimiter';
const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { BucketVersioningKeyFormat } = VSConst;
const { FILTER_ACCEPT, FILTER_SKIP, FILTER_END, SKIP_NONE, inc } = require('./tools');

import { GapSetEntry } from '../cache/GapSet';
import { GapCacheInterface } from '../cache/GapCache';

const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes } = VSConst;

export const enum DelimiterMasterFilterStateId {
    SkippingVersionsV0 = 101,
    WaitVersionAfterPHDV0 = 102,
    SkippingGapV0 = 103,
};

interface DelimiterMasterFilterState_SkippingVersionsV0 extends FilterState {
    id: DelimiterMasterFilterStateId.SkippingVersionsV0,
    masterKey: string,
};

interface DelimiterMasterFilterState_WaitVersionAfterPHDV0 extends FilterState {
    id: DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
    masterKey: string,
};

interface DelimiterMasterFilterState_SkippingGapV0 extends FilterState {
    id: DelimiterMasterFilterStateId.SkippingGapV0,
};

export const enum GapCachingState {
    NoGapCache = 0, // there is no gap cache
    UnknownGap = 1, // waiting for a cache lookup
    GapLookupInProgress = 2, // asynchronous gap lookup in progress
    GapCached = 3, // an upcoming or already skippable gap is cached
    NoMoreGap = 4, // the cache doesn't have any more gaps inside the listed range
};

type GapCachingInfo_NoGapCache = {
    state: GapCachingState.NoGapCache;
};

type GapCachingInfo_NoCachedGap = {
    state: GapCachingState.UnknownGap
        | GapCachingState.GapLookupInProgress;
    gapCache: GapCacheInterface;
};

type GapCachingInfo_GapCached = {
    state: GapCachingState.GapCached;
    gapCache: GapCacheInterface;
    gapCached: GapSetEntry;
};

type GapCachingInfo_NoMoreGap = {
    state: GapCachingState.NoMoreGap;
};

type GapCachingInfo = GapCachingInfo_NoGapCache
    | GapCachingInfo_NoCachedGap
    | GapCachingInfo_GapCached
    | GapCachingInfo_NoMoreGap;


export const enum GapBuildingState {
    Disabled = 0, // no gap cache or no gap building needed (e.g. in V1 versioning format)
    NotBuilding = 1, // not currently building a gap (i.e. not listing within a gap)
    Building = 2, // currently building a gap (i.e. listing within a gap)
    Expired = 3, // not allowed to build due to exposure delay timeout
};

type GapBuildingInfo_NothingToBuild = {
    state: GapBuildingState.Disabled | GapBuildingState.Expired;
};

type GapBuildingParams = {
    /**
     * minimum weight for a gap to be created in the cache
     */
    minGapWeight: number;
    /**
     * trigger a cache setGap() call every N skippable keys
     */
    triggerSaveGapWeight: number;
    /**
     * timestamp to assess whether we're still inside the validity period to
     * be allowed to build gaps
     */
    initTimestamp: number;
};

type GapBuildingInfo_NotBuilding = {
    state: GapBuildingState.NotBuilding;
    gapCache: GapCacheInterface;
    params: GapBuildingParams;
};

type GapBuildingInfo_Building = {
    state: GapBuildingState.Building;
    gapCache: GapCacheInterface;
    params: GapBuildingParams;
    /**
     * Gap currently being created
     */
    gap: GapSetEntry;
    /**
     * total current weight of the gap being created
     */
    gapWeight: number;
};

type GapBuildingInfo = GapBuildingInfo_NothingToBuild
    | GapBuildingInfo_NotBuilding
    | GapBuildingInfo_Building;

/**
 * Handle object listing with parameters. This extends the base class Delimiter
 * to return the raw master versions of existing objects.
 */
export class DelimiterMaster extends Delimiter {

    _gapCaching: GapCachingInfo;
    _gapBuilding: GapBuildingInfo;
    _refreshedBuildingParams: GapBuildingParams | null;

    /**
     * Delimiter listing of master versions.
     * @param {Object} parameters - listing parameters
     * @param {String} [parameters.delimiter] - delimiter per amazon format
     * @param {String} [parameters.prefix] - prefix per amazon format
     * @param {String} [parameters.marker] - marker per amazon format
     * @param {Number} [parameters.maxKeys] - number of keys to list
     * @param {Boolean} [parameters.v2] - indicates whether the v2 format is used
     * @param {String} [parameters.startAfter] - marker per amazon v2 format
     * @param {String} [parameters.continuationToken] - obfuscated amazon token
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat="v0"] - versioning key format
     */
    constructor(parameters, logger, vFormat?: string) {
        super(parameters, logger, vFormat);

        if (this.vFormat === BucketVersioningKeyFormat.v0) {
            // override Delimiter's implementation of NotSkipping for
            // DelimiterMaster logic (skipping versions and special
            // handling of delete markers and PHDs)
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingPrefixNorVersionsV0.bind(this));

            // add extra state handlers specific to DelimiterMaster with v0 format
            this.setKeyHandler(
                DelimiterMasterFilterStateId.SkippingVersionsV0,
                this.keyHandler_SkippingVersionsV0.bind(this));

            this.setKeyHandler(
                DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
                this.keyHandler_WaitVersionAfterPHDV0.bind(this));

            this.setKeyHandler(
                DelimiterMasterFilterStateId.SkippingGapV0,
                this.keyHandler_SkippingGapV0.bind(this));

            if (this.marker) {
                // distinct initial state to include some special logic
                // before the first master key is found that does not have
                // to be checked afterwards
                this.state = <DelimiterMasterFilterState_SkippingVersionsV0> {
                    id: DelimiterMasterFilterStateId.SkippingVersionsV0,
                    masterKey: this.marker,
                };
            } else {
                this.state = <DelimiterFilterState_NotSkipping> {
                    id: DelimiterFilterStateId.NotSkipping,
                };
            }
        } else {
            // save the base implementation of the `NotSkipping` state in
            // Delimiter before overriding it with ours, to be able to call it from there
            this.keyHandler_NotSkipping_Delimiter = this.keyHandlers[DelimiterFilterStateId.NotSkipping];
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingPrefixNorVersionsV1.bind(this));
            // in v1, the state set by Delimiter's constructor can be used
            // directly, so there is no need to reassign it here
        }

        // default initialization of the gap cache and building states;
        // they can be set later by refreshGapCache()
        this._gapCaching = {
            state: GapCachingState.NoGapCache,
        };
        this._gapBuilding = {
            state: GapBuildingState.Disabled,
        };
        this._refreshedBuildingParams = null;
    }

    /**
     * Get the validity period left before a refresh of the gap cache is needed
     * to continue building new gaps.
     *
     * @return {number|null} one of:
     * - the remaining time in milliseconds in which gaps can be added to the
     *   cache before a call to refreshGapCache() is required
     * - or 0 if there is no time left and a call to refreshGapCache() is required
     *   to resume caching gaps
     * - or null if refreshing the cache is never needed (because the gap cache
     *   is either not available or not used)
     */
    getGapBuildingValidityPeriodMs(): number | null {
        let gapBuilding;
        switch (this._gapBuilding.state) {
        case GapBuildingState.Disabled:
            return null;
        case GapBuildingState.Expired:
            return 0;
        case GapBuildingState.NotBuilding:
            gapBuilding = <GapBuildingInfo_NotBuilding> this._gapBuilding;
            break;
        case GapBuildingState.Building:
            gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
            break;
        }
        const { gapCache, params } = gapBuilding;
        const elapsedTime = Date.now() - params.initTimestamp;
        return Math.max(gapCache.exposureDelayMs - elapsedTime, 0);
    }

    /**
     * Refresh the gaps caching logic (gaps are series of current delete markers
     * in V0 bucket metadata format). It has two effects:
     *
     * - starts exposing existing and future gaps from the cache to efficiently
     *   skip over series of current delete markers that have been seen and cached
     *   earlier
     *
     * - enables building and caching new gaps (or extending existing ones), for a
     *   limited time period defined by the `gapCacheProxy.exposureDelayMs` value
     *   in milliseconds. To refresh the validity period and resume building and
     *   caching new gaps, one must start a new listing from the database (starting
     *   at the current listing key, included), then call refreshGapCache() again.
     *
     * @param {GapCacheInterface} gapCacheProxy - API proxy to the gaps cache
     * (the proxy should handle prefixing object keys with the bucket name)
     * @param {number} [minGapWeight=100] - minimum weight of a gap for it to be
     * added to the cache
     * @param {number} [triggerSaveGapWeight] - cumulative weight to wait for
     * before saving the current building gap. Cannot be greater than
     * `gapCacheProxy.maxGapWeight` (the value is thresholded to `maxGapWeight`
     * otherwise). Defaults to `gapCacheProxy.maxGapWeight / 2`.
     * @return {undefined}
     */
    refreshGapCache(
        gapCacheProxy: GapCacheInterface,
        minGapWeight?: number,
        triggerSaveGapWeight?: number
    ): void {
        if (this.vFormat !== BucketVersioningKeyFormat.v0) {
            return;
        }
        if (this._gapCaching.state === GapCachingState.NoGapCache) {
            this._gapCaching = {
                state: GapCachingState.UnknownGap,
                gapCache: gapCacheProxy,
            };
        }
        const refreshedBuildingParams: GapBuildingParams = {
            minGapWeight: minGapWeight || 100,
            triggerSaveGapWeight: triggerSaveGapWeight
                || Math.trunc(gapCacheProxy.maxGapWeight / 2),
            initTimestamp: Date.now(),
        };
        if (this._gapBuilding.state === GapBuildingState.Building) {
            // refreshed params will be applied as soon as the current building gap is saved
            this._refreshedBuildingParams = refreshedBuildingParams;
        } else {
            this._gapBuilding = {
                state: GapBuildingState.NotBuilding,
                gapCache: gapCacheProxy,
                params: refreshedBuildingParams,
            };
        }
    }
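
    // Usage sketch (not in the original source; names are illustrative):
    // a caller wires a gap cache proxy before streaming keys, then renews it
    // once the validity period runs out:
    //
    //     const listing = new DelimiterMaster(params, logger); // v0 format
    //     listing.refreshGapCache(gapCacheProxy);
    //     // ... stream keys through listing.filter() ...
    //     if (listing.getGapBuildingValidityPeriodMs() === 0) {
    //         // restart the listing from the current key, then:
    //         listing.refreshGapCache(gapCacheProxy);
    //     }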

    /**
     * Trigger a lookup of the closest upcoming or already skippable gap.
     *
     * @param {GapCachingInfo_NoCachedGap} gapCaching - current gap caching
     * state, holding the gap cache handle
     * @param {string} fromKey - lookup a gap not before 'fromKey'
     * @return {undefined} - the lookup is asynchronous and its
     * response is handled inside this function
     */
    _triggerGapLookup(gapCaching: GapCachingInfo_NoCachedGap, fromKey: string): void {
        this._gapCaching = {
            state: GapCachingState.GapLookupInProgress,
            gapCache: gapCaching.gapCache,
        };
        const maxKey = this.prefix ? inc(this.prefix) : undefined;
        gapCaching.gapCache.lookupGap(fromKey, maxKey).then(_gap => {
            const gap = <GapSetEntry | null> _gap;
            if (gap) {
                this._gapCaching = {
                    state: GapCachingState.GapCached,
                    gapCache: gapCaching.gapCache,
                    gapCached: gap,
                };
            } else {
                this._gapCaching = {
                    state: GapCachingState.NoMoreGap,
                };
            }
        });
    }

    _checkGapOnMasterDeleteMarker(key: string): FilterReturnValue {
        switch (this._gapBuilding.state) {
        case GapBuildingState.Disabled:
        case GapBuildingState.Expired:
            break;
        case GapBuildingState.NotBuilding:
            this._createBuildingGap(key, 1);
            break;
        case GapBuildingState.Building:
            this._updateBuildingGap(key);
            break;
        }
        if (this._gapCaching.state === GapCachingState.GapCached) {
            const { gapCached } = this._gapCaching;
            if (key >= gapCached.firstKey) {
                if (key <= gapCached.lastKey) {
                    // we are inside the last looked up cached gap: transition to
                    // 'SkippingGapV0' state
                    this.setState(<DelimiterMasterFilterState_SkippingGapV0> {
                        id: DelimiterMasterFilterStateId.SkippingGapV0,
                    });
                    // cut the current gap before skipping, it will be merged or
                    // chained with the existing one (depending on its weight)
                    if (this._gapBuilding.state === GapBuildingState.Building) {
                        // subtract 1 from the weight because we are going to chain this gap,
                        // which has an overlap of one key.
                        this._gapBuilding.gap.weight -= 1;
                        this._cutBuildingGap();
                    }
                    return FILTER_SKIP;
                }
                // as we are past the cached gap, we will need another lookup
                this._gapCaching = {
                    state: GapCachingState.UnknownGap,
                    gapCache: this._gapCaching.gapCache,
                };
            }
        }
        if (this._gapCaching.state === GapCachingState.UnknownGap) {
            this._triggerGapLookup(this._gapCaching, key);
        }
        return FILTER_ACCEPT;
    }

    filter_onNewMasterKeyV0(key: string, value: string): FilterReturnValue {
        // if this master key is a delete marker, accept it without
        // adding the version to the contents
        if (Version.isDeleteMarker(value)) {
            // update the state to start skipping versions of the new master key
            this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
                id: DelimiterMasterFilterStateId.SkippingVersionsV0,
                masterKey: key,
            });
            return this._checkGapOnMasterDeleteMarker(key);
        }
        if (Version.isPHD(value)) {
            // master version is a PHD version: wait for the first
            // following version that will be considered as the actual
            // master key
            this.setState(<DelimiterMasterFilterState_WaitVersionAfterPHDV0> {
                id: DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
                masterKey: key,
            });
            return FILTER_ACCEPT;
        }
        // cut the current gap as soon as a non-deleted entry is seen
        this._cutBuildingGap();

        if (key.startsWith(DbPrefixes.Replay)) {
            // skip internal replay prefix entirely
            this.setState(<DelimiterFilterState_SkippingPrefix> {
                id: DelimiterFilterStateId.SkippingPrefix,
                prefix: DbPrefixes.Replay,
            });
            return FILTER_SKIP;
        }
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }

        const commonPrefix = this.addCommonPrefixOrContents(key, value);
        if (commonPrefix) {
            // transition into SkippingPrefix state to skip all following keys
            // while they start with the same prefix
            this.setState(<DelimiterFilterState_SkippingPrefix> {
                id: DelimiterFilterStateId.SkippingPrefix,
                prefix: commonPrefix,
            });
            return FILTER_ACCEPT;
        }
        // update the state to start skipping versions of the new master key
        this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
            id: DelimiterMasterFilterStateId.SkippingVersionsV0,
            masterKey: key,
        });
        return FILTER_ACCEPT;
    }

    keyHandler_NotSkippingPrefixNorVersionsV0(key: string, value: string): FilterReturnValue {
        return this.filter_onNewMasterKeyV0(key, value);
    }

    filter_onNewMasterKeyV1(key: string, value: string): FilterReturnValue {
        // if this master key is a delete marker, accept it without
        // adding the version to the contents
        if (Version.isDeleteMarker(value)) {
            return FILTER_ACCEPT;
        }
        // use base Delimiter's implementation
        return this.keyHandler_NotSkipping_Delimiter(key, value);
    }

    keyHandler_NotSkippingPrefixNorVersionsV1(key: string, value: string): FilterReturnValue {
        return this.filter_onNewMasterKeyV1(key, value);
    }

    keyHandler_SkippingVersionsV0(key: string, value: string): FilterReturnValue {
        /* In the SkippingVersionsV0 state, skip all version keys
         * (<key><versionIdSeparator><version>) */
        const versionIdIndex = key.indexOf(VID_SEP);
        if (versionIdIndex !== -1) {
            // version keys count in the building gap weight because they must
            // also be listed until skipped
            if (this._gapBuilding.state === GapBuildingState.Building) {
                this._updateBuildingGap(key);
            }
            return FILTER_SKIP;
        }
        return this.filter_onNewMasterKeyV0(key, value);
    }

    keyHandler_WaitVersionAfterPHDV0(key: string, value: string): FilterReturnValue {
        // After a PHD key is encountered, the next version key of the
        // same object, if it exists, is the new master key, hence
        // consider it as such and call 'onNewMasterKeyV0' (the test
        // 'masterKey == phdKey' is probably redundant when we already
        // know we have a versioned key, since all objects in v0 have
        // a master key, but it is kept as a safety check)
        const { masterKey: phdKey } = <DelimiterMasterFilterState_WaitVersionAfterPHDV0> this.state;
        const versionIdIndex = key.indexOf(VID_SEP);
        if (versionIdIndex !== -1) {
            const masterKey = key.slice(0, versionIdIndex);
            if (masterKey === phdKey) {
                return this.filter_onNewMasterKeyV0(masterKey, value);
            }
        }
        return this.filter_onNewMasterKeyV0(key, value);
    }

    keyHandler_SkippingGapV0(key: string, value: string): FilterReturnValue {
        const { gapCache, gapCached } = <GapCachingInfo_GapCached> this._gapCaching;
        if (key <= gapCached.lastKey) {
            return FILTER_SKIP;
        }
        this._gapCaching = {
            state: GapCachingState.UnknownGap,
            gapCache,
        };
        this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
            id: DelimiterMasterFilterStateId.SkippingVersionsV0,
        });
        // Start a gap with weight=0 from the latest skippable key. This
        // allows extending the gap just skipped with a chained gap in case
        // other delete markers are seen after the existing gap is skipped.
        this._createBuildingGap(gapCached.lastKey, 0, gapCached.weight);

        return this.handleKey(key, value);
    }

    skippingBase(): string | undefined {
        switch (this.state.id) {
        case DelimiterMasterFilterStateId.SkippingVersionsV0:
            const { masterKey } = <DelimiterMasterFilterState_SkippingVersionsV0> this.state;
            return masterKey + inc(VID_SEP);

        case DelimiterMasterFilterStateId.SkippingGapV0:
            const { gapCached } = <GapCachingInfo_GapCached> this._gapCaching;
            return gapCached.lastKey;

        default:
            return super.skippingBase();
        }
    }

    result(): ResultObject {
        this._cutBuildingGap();
        return super.result();
    }

    _checkRefreshedBuildingParams(params: GapBuildingParams): GapBuildingParams {
        if (this._refreshedBuildingParams) {
            const newParams = this._refreshedBuildingParams;
            this._refreshedBuildingParams = null;
            return newParams;
        }
        return params;
    }

    /**
     * Save the gap being built if allowed (i.e. still within the
     * allocated exposure time window).
     *
     * @return {boolean} - true if the gap was saved, false if we are
     * outside the allocated exposure time window.
     */
    _saveBuildingGap(): boolean {
        const { gapCache, params, gap, gapWeight } =
            <GapBuildingInfo_Building> this._gapBuilding;
        const totalElapsed = Date.now() - params.initTimestamp;
        if (totalElapsed >= gapCache.exposureDelayMs) {
            this._gapBuilding = {
                state: GapBuildingState.Expired,
            };
            this._refreshedBuildingParams = null;
            return false;
        }
        const { firstKey, lastKey, weight } = gap;
        gapCache.setGap(firstKey, lastKey, weight);
        this._gapBuilding = {
            state: GapBuildingState.Building,
            gapCache,
            params: this._checkRefreshedBuildingParams(params),
            gap: {
                firstKey: gap.lastKey,
                lastKey: gap.lastKey,
                weight: 0,
            },
            gapWeight,
        };
        return true;
    }

    /**
     * Create a new gap to be extended afterwards
     *
     * @param {string} newKey - gap's first key
     * @param {number} startWeight - initial weight of the building gap (usually 0 or 1)
     * @param {number} [cachedWeight] - if continuing a cached gap, weight of the existing
     * cached portion
     * @return {undefined}
     */
    _createBuildingGap(newKey: string, startWeight: number, cachedWeight?: number): void {
        if (this._gapBuilding.state === GapBuildingState.NotBuilding) {
            const { gapCache, params } = <GapBuildingInfo_NotBuilding> this._gapBuilding;
            this._gapBuilding = {
                state: GapBuildingState.Building,
                gapCache,
                params: this._checkRefreshedBuildingParams(params),
                gap: {
                    firstKey: newKey,
                    lastKey: newKey,
                    weight: startWeight,
                },
                gapWeight: (cachedWeight || 0) + startWeight,
            };
        }
    }

    _updateBuildingGap(newKey: string): void {
        const gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
        const { params, gap } = gapBuilding;
        gap.lastKey = newKey;
        gap.weight += 1;
        gapBuilding.gapWeight += 1;
        // the GapCache API requires updating a gap regularly because it can only split
        // it once per update, by the known last key. In practice the default behavior
        // is to trigger an update after a number of keys that is half the maximum weight.
        // It is also useful for other listings to benefit from the cache sooner.
        if (gapBuilding.gapWeight >= params.minGapWeight &&
            gap.weight >= params.triggerSaveGapWeight) {
            this._saveBuildingGap();
        }
    }

    _cutBuildingGap(): void {
        if (this._gapBuilding.state === GapBuildingState.Building) {
            let gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
            let { gapCache, params, gap, gapWeight } = gapBuilding;
            // only set gaps that are significant enough in weight and
            // with a non-empty extension
            if (gapWeight >= params.minGapWeight && gap.weight > 0) {
                // we're done if we were not allowed to save the gap
                if (!this._saveBuildingGap()) {
                    return;
                }
                // params may have been refreshed, reload them
                gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
                params = gapBuilding.params;
            }
            this._gapBuilding = {
                state: GapBuildingState.NotBuilding,
                gapCache,
                params,
            };
        }
    }
}
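
To make the v0 transitions concrete, a minimal sketch (not part of the original file; keys and `logger` are hypothetical, and it assumes Version.isDeleteMarker() detects the isDeleteMarker flag in the JSON value):

    const listing = new DelimiterMaster({ maxKeys: 1000 }, logger); // v0 by default
    listing.filter({ key: 'a', value: '{}' });                      // master key: listed, state -> SkippingVersionsV0
    listing.filter({ key: `a${VID_SEP}v1`, value: '{}' });          // version key: FILTER_SKIP
    listing.filter({ key: 'b', value: '{"isDeleteMarker":true}' }); // delete marker: accepted, not listed
    listing.filter({ key: 'c', value: '{}' });                      // next master key: listed
    listing.result().Contents.map(e => e.key);                      // => ['a', 'c']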
@@ -1,202 +0,0 @@
const { DelimiterVersions } = require('./delimiterVersions');
const { FILTER_END, FILTER_SKIP } = require('./tools');

const TRIM_METADATA_MIN_BLOB_SIZE = 10000;

/**
 * Handle object listing with parameters. This extends the base class DelimiterVersions
 * to return the raw non-current version objects.
 */
class DelimiterNonCurrent extends DelimiterVersions {
    /**
     * Delimiter listing of non-current versions.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.keyMarker - key marker
     * @param {String} parameters.versionIdMarker - version id marker
     * @param {String} parameters.beforeDate - limit the response to keys with stale date older than beforeDate.
     * The "stale date" is the date when a version becomes non-current.
     * @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
     * @param {String} parameters.excludedDataStoreName - exclude versions whose dataStoreName matches
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger, vFormat);

        this.beforeDate = parameters.beforeDate;
        this.excludedDataStoreName = parameters.excludedDataStoreName;
        this.maxScannedLifecycleListingEntries = parameters.maxScannedLifecycleListingEntries;

        // internal state
        this.prevKey = null;
        this.staleDate = null;

        this.scannedKeys = 0;
    }

    getLastModified(value) {
        let lastModified;
        try {
            const v = JSON.parse(value);
            lastModified = v['last-modified'];
        } catch (e) {
            this.logger.warn('could not parse Object Metadata while listing',
                {
                    method: 'getLastModified',
                    err: e.toString(),
                });
        }
        return lastModified;
    }

    // Override keyHandler_SkippingVersions to include the last version from the previous listing.
    // The creation (last-modified) date of this version will be the stale date for the following version.
    // eslint-disable-next-line camelcase
    keyHandler_SkippingVersions(key, versionId, value) {
        if (key === this.keyMarker) {
            // since the nonversioned key equals the marker, there is
            // necessarily a versionId in this key
            const _versionId = versionId;
            if (_versionId < this.versionIdMarker) {
                // skip all versions until the marker
                return FILTER_SKIP;
            }
        }
        this.setState({
            id: 1 /* NotSkipping */,
        });
        return this.handleKey(key, versionId, value);
    }

    filter(obj) {
        if (this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries) {
            this.IsTruncated = true;
            this.logger.info('listing stopped due to reaching the maximum scanned entries limit',
                {
                    maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
                    scannedKeys: this.scannedKeys,
                });
            return FILTER_END;
        }
        ++this.scannedKeys;
        return super.filter(obj);
    }

    /**
     * NOTE: Each version of a specific key is sorted from the latest to the oldest
     * thanks to the way version ids are generated.
     * DESCRIPTION: Skip the version if it represents the master key, but keep its last-modified date in memory,
     * which will be the stale date of the following version.
     * The following version is pushed only:
     * - if the "stale date" (picked up from the previous version) is available (JSON.parse has not failed),
     * - if "beforeDate" is not specified, or if it is specified and the "stale date" is older,
     * - if "excludedDataStoreName" is not specified, or if it is specified and the data store name differs.
     * The in-memory "stale date" is then updated with the version's last-modified date to be used for
     * the following version.
     * The process stops and returns the available results if either:
     * - no more metadata keys are left to be processed
     * - the listing reaches the maximum number of keys to be returned
     * - the internal timeout is reached
     * @param {String} key - The key to add
     * @param {String} versionId - The version id
     * @param {String} value - The value of the key
     * @return {undefined}
     */
    addVersion(key, versionId, value) {
        this.nextKeyMarker = key;
        this.nextVersionIdMarker = versionId;

        // Skip the version if it represents the current version, but keep its last-modified date,
        // which will be the stale date of the following version.
        const isCurrentVersion = key !== this.prevKey;
        if (isCurrentVersion) {
            this.staleDate = this.getLastModified(value);
            this.prevKey = key;
            return;
        }

        // The following version is pushed only:
        // - if the "stale date" (picked up from the previous version) is available (JSON.parse has not failed),
        // - if "beforeDate" is not specified, or if it is specified and the "stale date" is older,
        // - if "excludedDataStoreName" is not specified, or if it is specified and the data store name differs.
        let lastModified;
        if (this.staleDate && (!this.beforeDate || this.staleDate < this.beforeDate)) {
            const parsedValue = this._parse(value);
            // if parsing fails, skip the key.
            if (parsedValue) {
                const dataStoreName = parsedValue.dataStoreName;
                lastModified = parsedValue['last-modified'];
                if (!this.excludedDataStoreName || dataStoreName !== this.excludedDataStoreName) {
                    const s = this._stringify(parsedValue, this.staleDate);
                    // check that _stringify succeeded, to only push objects with a defined staleDate.
                    if (s) {
                        this.Versions.push({ key, value: s });
                        ++this.keys;
                    }
                }
            }
        }

        // The in-memory "stale date" is then updated with the version's last-modified date to be used for
        // the following version.
        this.staleDate = lastModified || this.getLastModified(value);

        return;
    }

    /**
     * Parses the stringified entry's value and removes the location property if too large.
     * @param {string} s - stringified value
     * @return {object} p - undefined if parsing fails, otherwise the parsed value.
     */
    _parse(s) {
        let p;
        try {
            p = JSON.parse(s);
            if (s.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
                delete p.location;
            }
        } catch (e) {
            this.logger.warn('Could not parse Object Metadata while listing', {
                method: 'DelimiterNonCurrent._parse',
                err: e.toString(),
            });
        }
        return p;
    }

    _stringify(parsedMD, staleDate) {
        const p = parsedMD;
        let s = undefined;
        p.staleDate = staleDate;
        try {
            s = JSON.stringify(p);
        } catch (e) {
            this.logger.warn('could not stringify Object Metadata while listing', {
                method: 'DelimiterNonCurrent._stringify',
                err: e.toString(),
            });
        }
        return s;
    }

    result() {
        const { Versions, IsTruncated, NextKeyMarker, NextVersionIdMarker } = super.result();

        const result = {
            Contents: Versions,
            IsTruncated,
        };

        if (NextKeyMarker) {
            result.NextKeyMarker = NextKeyMarker;
        }

        if (NextVersionIdMarker) {
            result.NextVersionIdMarker = NextVersionIdMarker;
        }

        return result;
    }
}
module.exports = { DelimiterNonCurrent };
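
A worked sketch of the stale-date propagation (not part of the original file; dates and `logger` are hypothetical, and versions of a key arrive newest first):

    const listing = new DelimiterNonCurrent({ beforeDate: '2024-01-01' }, logger);
    // current version: not listed, its last-modified becomes the next stale date
    listing.addVersion('obj', 'v3', JSON.stringify({ 'last-modified': '2023-03-01' }));
    // non-current version: stale date '2023-03-01' < beforeDate => pushed with staleDate attached
    listing.addVersion('obj', 'v2', JSON.stringify({ 'last-modified': '2023-02-01' }));
    // the next version inherits '2023-02-01' as its stale date, and so on
    listing.addVersion('obj', 'v1', JSON.stringify({ 'last-modified': '2023-01-01' }));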
@@ -1,204 +0,0 @@
const DelimiterVersions = require('./delimiterVersions').DelimiterVersions;
const { FILTER_END } = require('./tools');
const TRIM_METADATA_MIN_BLOB_SIZE = 10000;
/**
 * Handle object listing with parameters. This extends the base class DelimiterVersions
 * to return the orphan delete markers. Orphan delete markers are also
 * referred to as expired object delete markers.
 * They are delete markers with zero noncurrent versions.
 */
class DelimiterOrphanDeleteMarker extends DelimiterVersions {
    /**
     * Delimiter listing of orphan delete markers.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.beforeDate - limit the response to keys older than beforeDate
     * @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        const {
            marker,
            maxKeys,
            prefix,
            beforeDate,
            maxScannedLifecycleListingEntries,
        } = parameters;

        const versionParams = {
            // The orphan delete marker logic uses the term 'marker' instead of 'keyMarker',
            // as the latter could suggest the presence of a 'versionIdMarker'.
            keyMarker: marker,
            maxKeys,
            prefix,
        };
        super(versionParams, logger, vFormat);

        this.maxScannedLifecycleListingEntries = maxScannedLifecycleListingEntries;
        this.beforeDate = beforeDate;
        // this.prevKeyName is used as a marker for the next listing when the current one reaches its entry limit.
        // We cannot rely on this.keyName, as it contains the name of the current key.
        // In the event of a listing interruption due to reaching the maximum scanned entries,
        // relying on this.keyName would cause the next listing to skip the current key, because S3 starts
        // listing after the marker.
        this.prevKeyName = null;
        this.keyName = null;
        this.value = null;
        this.scannedKeys = 0;
    }

    _reachedMaxKeys() {
        if (this.keys >= this.maxKeys) {
            return true;
        }
        return false;
    }

    _addOrphan() {
        const parsedValue = this._parse(this.value);
        // if parsing fails, skip the key.
        if (parsedValue) {
            const lastModified = parsedValue['last-modified'];
            const isDeleteMarker = parsedValue.isDeleteMarker;
            // We then check that the orphan version is a delete marker and that it is older than the "beforeDate"
            if ((!this.beforeDate || (lastModified && lastModified < this.beforeDate)) && isDeleteMarker) {
                // Prefer returning untrimmed data rather than dropping the entry if stringification fails.
                const s = this._stringify(parsedValue) || this.value;
                this.Versions.push({ key: this.keyName, value: s });
                this.nextKeyMarker = this.keyName;
                ++this.keys;
            }
        }
    }

    /**
     * Parses the stringified entry's value and removes the location property if too large.
     * @param {string} s - stringified value
     * @return {object} p - undefined if parsing fails, otherwise the parsed value.
     */
    _parse(s) {
        let p;
        try {
            p = JSON.parse(s);
            if (s.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
                delete p.location;
            }
        } catch (e) {
            this.logger.warn('Could not parse Object Metadata while listing', {
                method: 'DelimiterOrphanDeleteMarker._parse',
                err: e.toString(),
            });
        }
        return p;
|
|
||||||
}
|
|
||||||
|
|
||||||
_stringify(value) {
|
|
||||||
const p = value;
|
|
||||||
let s = undefined;
|
|
||||||
try {
|
|
||||||
s = JSON.stringify(p);
|
|
||||||
} catch (e) {
|
|
||||||
this.logger.warn('could not stringify Object Metadata while listing',
|
|
||||||
{
|
|
||||||
method: 'DelimiterOrphanDeleteMarker._stringify',
|
|
||||||
err: e.toString(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
return s;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* The purpose of _isMaxScannedEntriesReached is to restrict the number of scanned entries,
|
|
||||||
* thus controlling resource overhead (CPU...).
|
|
||||||
* @return {boolean} isMaxScannedEntriesReached - true if the maximum limit on the number
|
|
||||||
* of entries scanned has been reached, false otherwise.
|
|
||||||
*/
|
|
||||||
_isMaxScannedEntriesReached() {
|
|
||||||
return this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries;
|
|
||||||
}
|
|
||||||
|
|
||||||
filter(obj) {
|
|
||||||
if (this._isMaxScannedEntriesReached()) {
|
|
||||||
this.nextKeyMarker = this.prevKeyName;
|
|
||||||
this.IsTruncated = true;
|
|
||||||
this.logger.info('listing stopped due to reaching the maximum scanned entries limit',
|
|
||||||
{
|
|
||||||
maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
|
|
||||||
scannedKeys: this.scannedKeys,
|
|
||||||
});
|
|
||||||
return FILTER_END;
|
|
||||||
}
|
|
||||||
++this.scannedKeys;
|
|
||||||
return super.filter(obj);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* NOTE: Each version of a specific key is sorted from the latest to the oldest
|
|
||||||
* thanks to the way version ids are generated.
|
|
||||||
* DESCRIPTION: For a given key, the latest version is kept in memory since it is the current version.
|
|
||||||
* If the following version reference a new key, it means that the previous one was an orphan version.
|
|
||||||
* We then check if the orphan version is a delete marker and if it is older than the "beforeDate"
|
|
||||||
* The process stops and returns the available results if either:
|
|
||||||
* - no more metadata key is left to be processed
|
|
||||||
* - the listing reaches the maximum number of key to be returned
|
|
||||||
* - the internal timeout is reached
|
|
||||||
* NOTE: we cannot leverage MongoDB to list keys older than "beforeDate"
|
|
||||||
* because then we will not be able to assess its orphanage.
|
|
||||||
* @param {String} key - The object key.
|
|
||||||
* @param {String} versionId - The object version id.
|
|
||||||
* @param {String} value - The value of the key
|
|
||||||
* @return {undefined}
|
|
||||||
*/
|
|
||||||
addVersion(key, versionId, value) {
|
|
||||||
// For a given key, the youngest version is kept in memory since it represents the current version.
|
|
||||||
if (key !== this.keyName) {
|
|
||||||
// If this.value is defined, it means that <this.keyName, this.value> pair is "allowed" to be an orphan.
|
|
||||||
if (this.value) {
|
|
||||||
this._addOrphan();
|
|
||||||
}
|
|
||||||
this.prevKeyName = this.keyName;
|
|
||||||
this.keyName = key;
|
|
||||||
this.value = value;
|
|
||||||
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// If the key is not the current version, we can skip it in the next listing
|
|
||||||
// in the case where the current listing is interrupted due to reaching the maximum scanned entries.
|
|
||||||
this.prevKeyName = key;
|
|
||||||
this.keyName = key;
|
|
||||||
this.value = null;
|
|
||||||
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
result() {
|
|
||||||
// Only check for remaining last orphan delete marker if the listing is not interrupted.
|
|
||||||
// This will help avoid false positives.
|
|
||||||
if (!this._isMaxScannedEntriesReached()) {
|
|
||||||
// The following check makes sure the last orphan delete marker is not forgotten.
|
|
||||||
if (this.keys < this.maxKeys) {
|
|
||||||
if (this.value) {
|
|
||||||
this._addOrphan();
|
|
||||||
}
|
|
||||||
// The following make sure that if makeKeys is reached, isTruncated is set to true.
|
|
||||||
// We moved the "isTruncated" from _reachedMaxKeys to make sure we take into account the last entity
|
|
||||||
// if listing is truncated right before the last entity and the last entity is a orphan delete marker.
|
|
||||||
} else {
|
|
||||||
this.IsTruncated = this.maxKeys > 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const result = {
|
|
||||||
Contents: this.Versions,
|
|
||||||
IsTruncated: this.IsTruncated,
|
|
||||||
};
|
|
||||||
|
|
||||||
if (this.IsTruncated) {
|
|
||||||
result.NextMarker = this.nextKeyMarker;
|
|
||||||
}
|
|
||||||
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = { DelimiterOrphanDeleteMarker };
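
To illustrate the orphan detection above, here is a hypothetical entry sequence (entries arrive sorted by key, newest version first within each key; `logger` is an assumed werelogs logger):

const { DelimiterOrphanDeleteMarker } = require('./delimiterOrphanDeleteMarker');

const listing = new DelimiterOrphanDeleteMarker({ maxKeys: 1000 }, logger);
// 'a' has a delete marker followed by a noncurrent version: not an orphan.
listing.addVersion('a', 'v2', JSON.stringify({ isDeleteMarker: true, 'last-modified': '2023-01-01' }));
listing.addVersion('a', 'v1', JSON.stringify({ isDeleteMarker: false, 'last-modified': '2022-06-01' }));
// 'b' has a single delete marker and no other version: an orphan.
listing.addVersion('b', 'v1', JSON.stringify({ isDeleteMarker: true, 'last-modified': '2023-06-01' }));
const res = listing.result(); // res.Contents contains only key 'b'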
@ -1,535 +0,0 @@
'use strict'; // eslint-disable-line strict

const Extension = require('./Extension').default;

import {
    FilterState,
    FilterReturnValue,
} from './delimiter';

const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } =
    require('./tools');

const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

export const enum DelimiterVersionsFilterStateId {
    NotSkipping = 1,
    SkippingPrefix = 2,
    SkippingVersions = 3,
};

export interface DelimiterVersionsFilterState_NotSkipping extends FilterState {
    id: DelimiterVersionsFilterStateId.NotSkipping,
};

export interface DelimiterVersionsFilterState_SkippingPrefix extends FilterState {
    id: DelimiterVersionsFilterStateId.SkippingPrefix,
    prefix: string;
};

export interface DelimiterVersionsFilterState_SkippingVersions extends FilterState {
    id: DelimiterVersionsFilterStateId.SkippingVersions,
    gt: string;
};

type KeyHandler = (key: string, versionId: string | undefined, value: string) => FilterReturnValue;

type ResultObject = {
    CommonPrefixes: string[],
    Versions: {
        key: string;
        value: string;
        versionId: string;
    }[];
    IsTruncated: boolean;
    Delimiter ?: string;
    NextKeyMarker ?: string;
    NextVersionIdMarker ?: string;
};

type GenMDParamsItem = {
    gt ?: string,
    gte ?: string,
    lt ?: string,
};

/**
 * Handle object listing with parameters
 *
 * @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
 * @prop {String[]} Contents - 'files' to list
 * @prop {Boolean} IsTruncated - truncated listing flag
 * @prop {String|undefined} NextMarker - marker per amazon format
 * @prop {Number} keys - count of listed keys
 * @prop {String|undefined} delimiter - separator per amazon format
 * @prop {String|undefined} prefix - prefix per amazon format
 * @prop {Number} maxKeys - number of keys to list
 */
export class DelimiterVersions extends Extension {

    state: FilterState;
    keyHandlers: { [id: number]: KeyHandler };

    constructor(parameters, logger, vFormat) {
        super(parameters, logger);
        // original listing parameters
        this.delimiter = parameters.delimiter;
        this.prefix = parameters.prefix;
        this.maxKeys = parameters.maxKeys || 1000;
        // specific to version listing
        this.keyMarker = parameters.keyMarker;
        this.versionIdMarker = parameters.versionIdMarker;
        // internal state
        this.masterKey = undefined;
        this.masterVersionId = undefined;
        this.nullKey = null;
        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
        // listing results
        this.CommonPrefixes = [];
        this.Versions = [];
        this.IsTruncated = false;
        this.nextKeyMarker = parameters.keyMarker;
        this.nextVersionIdMarker = undefined;

        this.keyHandlers = {};

        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: {
                genMDParams: this.genMDParamsV0,
                getObjectKey: this.getObjectKeyV0,
                skipping: this.skippingV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                genMDParams: this.genMDParamsV1,
                getObjectKey: this.getObjectKeyV1,
                skipping: this.skippingV1,
            },
        }[this.vFormat]);

        if (this.vFormat === BucketVersioningKeyFormat.v0) {
            this.setKeyHandler(
                DelimiterVersionsFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingV0.bind(this));
        } else {
            this.setKeyHandler(
                DelimiterVersionsFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingV1.bind(this));
        }
        this.setKeyHandler(
            DelimiterVersionsFilterStateId.SkippingPrefix,
            this.keyHandler_SkippingPrefix.bind(this));

        this.setKeyHandler(
            DelimiterVersionsFilterStateId.SkippingVersions,
            this.keyHandler_SkippingVersions.bind(this));

        if (this.versionIdMarker) {
            this.state = <DelimiterVersionsFilterState_SkippingVersions> {
                id: DelimiterVersionsFilterStateId.SkippingVersions,
                gt: `${this.keyMarker}${VID_SEP}${this.versionIdMarker}`,
            };
        } else {
            this.state = <DelimiterVersionsFilterState_NotSkipping> {
                id: DelimiterVersionsFilterStateId.NotSkipping,
            };
        }
    }

    genMDParamsV0() {
        const params: GenMDParamsItem = {};
        if (this.prefix) {
            params.gte = this.prefix;
            params.lt = inc(this.prefix);
        }
        if (this.keyMarker && this.delimiter) {
            const commonPrefix = this.getCommonPrefix(this.keyMarker);
            if (commonPrefix) {
                const afterPrefix = inc(commonPrefix);
                if (!params.gte || afterPrefix > params.gte) {
                    params.gte = afterPrefix;
                }
            }
        }
        if (this.keyMarker && (!params.gte || this.keyMarker >= params.gte)) {
            delete params.gte;
            if (this.versionIdMarker) {
                // start from the beginning of versions so we can
                // check if there's a null key and fetch it
                // (afterwards, we can skip the rest of versions until
                // we reach versionIdMarker)
                params.gte = `${this.keyMarker}${VID_SEP}`;
            } else {
                params.gt = `${this.keyMarker}${inc(VID_SEP)}`;
            }
        }
        return params;
    }

    genMDParamsV1() {
        // return an array of two listing params sets to ask for
        // synchronized listing of M and V ranges
        const v0Params: GenMDParamsItem = this.genMDParamsV0();
        const mParams: GenMDParamsItem = {};
        const vParams: GenMDParamsItem = {};
        if (v0Params.gt) {
            mParams.gt = `${DbPrefixes.Master}${v0Params.gt}`;
            vParams.gt = `${DbPrefixes.Version}${v0Params.gt}`;
        } else if (v0Params.gte) {
            mParams.gte = `${DbPrefixes.Master}${v0Params.gte}`;
            vParams.gte = `${DbPrefixes.Version}${v0Params.gte}`;
        } else {
            mParams.gte = DbPrefixes.Master;
            vParams.gte = DbPrefixes.Version;
        }
        if (v0Params.lt) {
            mParams.lt = `${DbPrefixes.Master}${v0Params.lt}`;
            vParams.lt = `${DbPrefixes.Version}${v0Params.lt}`;
        } else {
            mParams.lt = inc(DbPrefixes.Master);
            vParams.lt = inc(DbPrefixes.Version);
        }
        return [mParams, vParams];
    }

    /**
     * check if the max keys count has been reached and set the
     * final state of the result if it is the case
     * @return {Boolean} - indicates if the iteration has to stop
     */
    _reachedMaxKeys(): boolean {
        if (this.keys >= this.maxKeys) {
            // In cases of maxKeys <= 0 -> IsTruncated = false
            this.IsTruncated = this.maxKeys > 0;
            return true;
        }
        return false;
    }

    /**
     * Used to synchronize listing of M and V prefixes by object key
     *
     * @param {object} masterObj - object listed from the first range
     * returned by genMDParamsV1() (the master keys range)
     * @param {object} versionObj - object listed from the second range
     * returned by genMDParamsV1() (the version keys range)
     * @return {number} comparison result:
     *   * -1 if master key < version key
     *   * 1 if master key > version key
     */
    compareObjects(masterObj, versionObj) {
        const masterKey = masterObj.key.slice(DbPrefixes.Master.length);
        const versionKey = versionObj.key.slice(DbPrefixes.Version.length);
        return masterKey < versionKey ? -1 : 1;
    }

    /**
     * Parse a listing key into its nonversioned key and version ID components
     *
     * @param {string} fullKey - full listing key
     * @return {object} obj
     * @return {string} obj.key - nonversioned part of key
     * @return {string} [obj.versionId] - version ID in the key
     */
    parseKey(fullKey: string): { key: string, versionId ?: string } {
        const versionIdIndex = fullKey.indexOf(VID_SEP);
        if (versionIdIndex === -1) {
            return { key: fullKey };
        }
        const nonversionedKey: string = fullKey.slice(0, versionIdIndex);
        const versionId: string = fullKey.slice(versionIdIndex + 1);
        return { key: nonversionedKey, versionId };
    }

    /**
     * Include a key in the listing output, in the Versions or CommonPrefix result
     *
     * @param {string} key - key (without version ID)
     * @param {string} versionId - version ID
     * @param {string} value - metadata value
     * @return {undefined}
     */
    addKey(key: string, versionId: string, value: string) {
        // add the subprefix to the common prefixes if the key has the delimiter
        const commonPrefix = this.getCommonPrefix(key);
        if (commonPrefix) {
            this.addCommonPrefix(commonPrefix);
            // transition into SkippingPrefix state to skip all following keys
            // while they start with the same prefix
            this.setState(<DelimiterVersionsFilterState_SkippingPrefix> {
                id: DelimiterVersionsFilterStateId.SkippingPrefix,
                prefix: commonPrefix,
            });
        } else {
            this.addVersion(key, versionId, value);
        }
    }

    /**
     * Add a (key, versionId, value) tuple to the listing.
     * Set the NextMarker to the current key.
     * Increment the keys counter.
     * @param {String} key - The key to add
     * @param {String} versionId - versionId
     * @param {String} value - The value of the key
     * @return {undefined}
     */
    addVersion(key: string, versionId: string, value: string) {
        this.Versions.push({
            key,
            versionId,
            value: this.trimMetadata(value),
        });
        this.nextKeyMarker = key;
        this.nextVersionIdMarker = versionId;
        ++this.keys;
    }

    getCommonPrefix(key: string): string | undefined {
        if (!this.delimiter) {
            return undefined;
        }
        const baseIndex = this.prefix ? this.prefix.length : 0;
        const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
        if (delimiterIndex === -1) {
            return undefined;
        }
        return key.substring(0, delimiterIndex + this.delimiter.length);
    }

    /**
     * Add a Common Prefix in the list
     * @param {String} commonPrefix - common prefix to add
     * @return {undefined}
     */
    addCommonPrefix(commonPrefix: string): void {
        // add the new prefix to the list
        this.CommonPrefixes.push(commonPrefix);
        ++this.keys;
        this.nextKeyMarker = commonPrefix;
        this.nextVersionIdMarker = undefined;
    }

    /**
     * Cache the current null key, to save it for outputting it later at
     * the correct position
     *
     * @param {String} key - nonversioned key of the null key
     * @param {String} versionId - real version ID of the null key
     * @param {String} value - value of the null key
     * @return {undefined}
     */
    cacheNullKey(key: string, versionId: string, value: string): void {
        this.nullKey = { key, versionId, value };
    }

    getObjectKeyV0(obj: { key: string }): string {
        return obj.key;
    }

    getObjectKeyV1(obj: { key: string }): string {
        return obj.key.slice(DbPrefixes.Master.length);
    }

    /**
     * Filter to apply on each iteration, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filter(obj: { key: string, value: string }): FilterReturnValue {
        const key = this.getObjectKey(obj);
        const value = obj.value;

        const { key: nonversionedKey, versionId: keyVersionId } = this.parseKey(key);
        if (this.nullKey) {
            if (this.nullKey.key !== nonversionedKey
                || this.nullKey.versionId < <string> keyVersionId) {
                this.handleKey(
                    this.nullKey.key, this.nullKey.versionId, this.nullKey.value);
                this.nullKey = null;
            }
        }
        if (keyVersionId === '') {
            // null key
            this.cacheNullKey(nonversionedKey, Version.from(value).getVersionId(), value);
            if (this.state.id === DelimiterVersionsFilterStateId.SkippingVersions) {
                return FILTER_SKIP;
            }
            return FILTER_ACCEPT;
        }
        return this.handleKey(nonversionedKey, keyVersionId, value);
    }

    setState(state: FilterState): void {
        this.state = state;
    }

    setKeyHandler(stateId: number, keyHandler: KeyHandler): void {
        this.keyHandlers[stateId] = keyHandler;
    }

    handleKey(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        return this.keyHandlers[this.state.id](key, versionId, value);
    }

    keyHandler_NotSkippingV0(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        if (key.startsWith(DbPrefixes.Replay)) {
            // skip internal replay prefix entirely
            this.setState(<DelimiterVersionsFilterState_SkippingPrefix> {
                id: DelimiterVersionsFilterStateId.SkippingPrefix,
                prefix: DbPrefixes.Replay,
            });
            return FILTER_SKIP;
        }
        if (Version.isPHD(value)) {
            return FILTER_ACCEPT;
        }
        return this.filter_onNewKey(key, versionId, value);
    }

    keyHandler_NotSkippingV1(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        // NOTE: this check on PHD is only useful for Artesca, S3C
        // does not use PHDs in V1 format
        if (Version.isPHD(value)) {
            return FILTER_ACCEPT;
        }
        return this.filter_onNewKey(key, versionId, value);
    }

    filter_onNewKey(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        if (versionId === undefined) {
            this.masterKey = key;
            this.masterVersionId = Version.from(value).getVersionId() || 'null';
            this.addKey(this.masterKey, this.masterVersionId, value);
        } else {
            if (this.masterKey === key && this.masterVersionId === versionId) {
                // do not add a version key if it is the master version
                return FILTER_ACCEPT;
            }
            this.addKey(key, versionId, value);
        }
        return FILTER_ACCEPT;
    }

    keyHandler_SkippingPrefix(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        const { prefix } = <DelimiterVersionsFilterState_SkippingPrefix> this.state;
        if (key.startsWith(prefix)) {
            return FILTER_SKIP;
        }
        this.setState(<DelimiterVersionsFilterState_NotSkipping> {
            id: DelimiterVersionsFilterStateId.NotSkipping,
        });
        return this.handleKey(key, versionId, value);
    }

    keyHandler_SkippingVersions(key: string, versionId: string | undefined, value: string): FilterReturnValue {
        if (key === this.keyMarker) {
            // since the nonversioned key equals the marker, there is
            // necessarily a versionId in this key
            const _versionId = <string> versionId;
            if (_versionId < this.versionIdMarker) {
                // skip all versions until marker
                return FILTER_SKIP;
            }
            if (_versionId === this.versionIdMarker) {
                // nothing left to skip, so return ACCEPT, but don't add this version
                return FILTER_ACCEPT;
            }
        }
        this.setState(<DelimiterVersionsFilterState_NotSkipping> {
            id: DelimiterVersionsFilterStateId.NotSkipping,
        });
        return this.handleKey(key, versionId, value);
    }

    skippingBase(): string | undefined {
        switch (this.state.id) {
        case DelimiterVersionsFilterStateId.SkippingPrefix: {
            const { prefix } = <DelimiterVersionsFilterState_SkippingPrefix> this.state;
            return inc(prefix);
        }
        case DelimiterVersionsFilterStateId.SkippingVersions: {
            const { gt } = <DelimiterVersionsFilterState_SkippingVersions> this.state;
            // the contract of skipping() is to return the first key
            // that can be skipped to, so adding a null byte to skip
            // over the existing versioned key set in 'gt'
            return `${gt}\0`;
        }
        default:
            return SKIP_NONE;
        }
    }

    skippingV0() {
        return this.skippingBase();
    }

    skippingV1() {
        const skipTo = this.skippingBase();
        if (skipTo === SKIP_NONE) {
            return SKIP_NONE;
        }
        // skip to the same object key in both M and V range listings
        return [
            `${DbPrefixes.Master}${skipTo}`,
            `${DbPrefixes.Version}${skipTo}`,
        ];
    }

    /**
     * Return an object containing all mandatory fields to use once the
     * iteration is done; doesn't show a NextMarker field if the output
     * isn't truncated
     * @return {Object} - following amazon format
     */
    result() {
        // Add the last null key if still in cache (when it is the
        // last version of the last key)
        //
        // NOTE: _reachedMaxKeys sets IsTruncated to true when it
        // returns true. Here we want this because either:
        //
        // - we did not reach the max keys yet, so the result is not
        //   truncated and there is still room for the null key in
        //   the results
        //
        // - OR we reached it already while having to process a new
        //   key (so the result is truncated even without the null key)
        //
        // - OR we are *just* below the limit but the null key to add
        //   does not fit, so we know the result is now truncated
        //   because there remains the null key to be output.
        //
        if (this.nullKey) {
            this.handleKey(this.nullKey.key, this.nullKey.versionId, this.nullKey.value);
        }
        const result: ResultObject = {
            CommonPrefixes: this.CommonPrefixes,
            Versions: this.Versions,
            IsTruncated: this.IsTruncated,
        };
        if (this.delimiter) {
            result.Delimiter = this.delimiter;
        }
        if (this.IsTruncated) {
            result.NextKeyMarker = this.nextKeyMarker;
            if (this.nextVersionIdMarker) {
                result.NextVersionIdMarker = this.nextVersionIdMarker;
            }
        }
        return result;
    }
}

module.exports = { DelimiterVersions };
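
A hypothetical driver for the class above, sketching how a listing backend would use it: build the range parameters, stream sorted entries through filter(), then collect the result (`logger` and `entriesInRange` are assumptions; the key format defaults to v0):

const { DelimiterVersions } = require('./delimiterVersions');

const listing = new DelimiterVersions({ maxKeys: 1000, delimiter: '/' }, logger);
const params = listing.genMDParams(); // range(s) to request from the metadata store
for (const entry of entriesInRange) { // sorted { key, value } records within `params`
    if (listing.filter(entry) < 0) {  // FILTER_END: max keys reached
        break;
    }
}
const { Versions, CommonPrefixes, IsTruncated, NextKeyMarker } = listing.result();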
@ -1,12 +0,0 @@
module.exports = {
    Basic: require('./basic').List,
    Delimiter: require('./delimiter').Delimiter,
    DelimiterVersions: require('./delimiterVersions')
        .DelimiterVersions,
    DelimiterMaster: require('./delimiterMaster')
        .DelimiterMaster,
    MPU: require('./MPU').MultipartUploads,
    DelimiterCurrent: require('./delimiterCurrent').DelimiterCurrent,
    DelimiterNonCurrent: require('./delimiterNonCurrent').DelimiterNonCurrent,
    DelimiterOrphanDeleteMarker: require('./delimiterOrphanDeleteMarker').DelimiterOrphanDeleteMarker,
};
@ -1,85 +0,0 @@
const assert = require('assert');

const { FILTER_END, FILTER_SKIP, SKIP_NONE } = require('./tools');


const MAX_STREAK_LENGTH = 100;

/**
 * Handle the filtering and the skip mechanism of a listing result.
 */
class Skip {
    /**
     * @param {Object} params - skip parameters
     * @param {Object} params.extension - delimiter extension used (required)
     * @param {String} params.gte - current range gte (greater than or
     * equal) used by the client code
     */
    constructor(params) {
        assert(params.extension);

        this.extension = params.extension;
        this.gteParams = params.gte;

        this.listingEndCb = null;
        this.skipRangeCb = null;

        /* Used to count consecutive FILTER_SKIP returned by the extension
         * filter method. Once this counter reaches MAX_STREAK_LENGTH, the
         * filter function tries to skip unwanted values by defining a new
         * range. */
        this.streakLength = 0;
    }

    setListingEndCb(cb) {
        this.listingEndCb = cb;
    }

    setSkipRangeCb(cb) {
        this.skipRangeCb = cb;
    }

    /**
     * Filter an entry.
     * @param {Object} entry - entry to filter.
     * @return {undefined}
     *
     * This function calls the listing end or the skip range callbacks if
     * needed.
     */
    filter(entry) {
        assert(this.listingEndCb);
        assert(this.skipRangeCb);

        const filteringResult = this.extension.filter(entry);
        const skipTo = this.extension.skipping();

        if (filteringResult === FILTER_END) {
            this.listingEndCb();
        } else if (filteringResult === FILTER_SKIP
                   && skipTo !== SKIP_NONE) {
            if (++this.streakLength >= MAX_STREAK_LENGTH) {
                let newRange;
                if (Array.isArray(skipTo)) {
                    newRange = [];
                    for (let i = 0; i < skipTo.length; ++i) {
                        newRange.push(skipTo[i]);
                    }
                } else {
                    newRange = skipTo;
                }
                /* Avoid looping on the same range again and again. */
                if (newRange === this.gteParams) {
                    this.streakLength = 1;
                } else {
                    this.skipRangeCb(newRange);
                }
            }
        } else {
            this.streakLength = 0;
        }
    }
}


module.exports = Skip;
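
A hypothetical wiring of Skip between a raw key stream and a listing extension: after MAX_STREAK_LENGTH consecutive skips, the cursor is re-opened at the extension's suggested range instead of scanning entry by entry (`listing`, `rangeParams`, `keyStream` and `restartListingFrom` are all assumed to exist in the caller):

const Skip = require('./skip');

const skip = new Skip({ extension: listing, gte: rangeParams.gte });
skip.setListingEndCb(() => keyStream.destroy());
skip.setSkipRangeCb(newRange => {
    keyStream.destroy();
    restartListingFrom(newRange); // hypothetical helper re-opening the cursor
});
keyStream.on('data', entry => skip.filter(entry));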
@ -1,70 +0,0 @@
const { DbPrefixes } = require('../../versioning/constants').VersioningConstants;

// constants for extensions
const SKIP_NONE = undefined; // to be in line with the values of NextMarker
const FILTER_ACCEPT = 1;
const FILTER_SKIP = 0;
const FILTER_END = -1;

/**
 * This function checks if a number is valid.
 * To be valid, a number needs to be an integer and be lower than the limit,
 * if specified.
 * If the number is not valid, the limit is returned.
 * @param {Number} number - The number to check
 * @param {Number} limit - The limit to respect
 * @return {Number} - The parsed number || limit
 */
function checkLimit(number, limit) {
    const parsed = Number.parseInt(number, 10);
    const valid = !Number.isNaN(parsed) && (!limit || parsed <= limit);
    return valid ? parsed : limit;
}

/**
 * Increment the charCode of the last character of a valid string.
 *
 * @param {string} str - the input string
 * @return {string} - the incremented string
 * or the input if it is not valid
 */
function inc(str) {
    return str ? (str.slice(0, str.length - 1) +
        String.fromCharCode(str.charCodeAt(str.length - 1) + 1)) : str;
}

/**
 * Transform listing parameters for v0 versioning key format to make
 * them compatible with the v1 format
 *
 * @param {object} v0params - listing parameters for v0 format
 * @return {object} - listing parameters for v1 format
 */
function listingParamsMasterKeysV0ToV1(v0params) {
    const v1params = Object.assign({}, v0params);
    if (v0params.gt !== undefined) {
        v1params.gt = `${DbPrefixes.Master}${v0params.gt}`;
    } else if (v0params.gte !== undefined) {
        v1params.gte = `${DbPrefixes.Master}${v0params.gte}`;
    } else {
        v1params.gte = DbPrefixes.Master;
    }
    if (v0params.lt !== undefined) {
        v1params.lt = `${DbPrefixes.Master}${v0params.lt}`;
    } else if (v0params.lte !== undefined) {
        v1params.lte = `${DbPrefixes.Master}${v0params.lte}`;
    } else {
        v1params.lt = inc(DbPrefixes.Master); // stop after the last master key
    }
    return v1params;
}

module.exports = {
    checkLimit,
    inc,
    listingParamsMasterKeysV0ToV1,
    SKIP_NONE,
    FILTER_END,
    FILTER_SKIP,
    FILTER_ACCEPT,
};
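
A few concrete values for the helpers above:

const { inc, checkLimit } = require('./tools');

inc('photos/');           // => 'photos0' ('/' bumped to the next char code)
inc('');                  // => ''        (falsy input returned unchanged)
checkLimit('500', 1000);  // => 500       (valid and within the limit)
checkLimit('abc', 1000);  // => 1000      (not a number: fall back to the limit)
checkLimit('5000', 1000); // => 1000      (above the limit)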
@ -1,87 +0,0 @@
function indexOf(arr, value) {
    if (!arr.length) {
        return -1;
    }
    let lo = 0;
    let hi = arr.length - 1;

    while (hi - lo > 1) {
        const i = lo + ((hi - lo) >> 1);
        if (arr[i] > value) {
            hi = i;
        } else {
            lo = i;
        }
    }
    if (arr[lo] === value) {
        return lo;
    }
    if (arr[hi] === value) {
        return hi;
    }
    return -1;
}

function indexAtOrBelow(arr, value) {
    let i;
    let lo;
    let hi;

    if (!arr.length || arr[0] > value) {
        return -1;
    }
    if (arr[arr.length - 1] <= value) {
        return arr.length - 1;
    }

    lo = 0;
    hi = arr.length - 1;

    while (hi - lo > 1) {
        i = lo + ((hi - lo) >> 1);
        if (arr[i] > value) {
            hi = i;
        } else {
            lo = i;
        }
    }

    return lo;
}

/*
 * perform symmetric diff in O(m + n)
 */
function symDiff(k1, k2, v1, v2, cb) {
    let i = 0;
    let j = 0;
    const n = k1.length;
    const m = k2.length;

    while (i < n && j < m) {
        if (k1[i] < k2[j]) {
            cb(v1[i]);
            i++;
        } else if (k2[j] < k1[i]) {
            cb(v2[j]);
            j++;
        } else {
            i++;
            j++;
        }
    }
    while (i < n) {
        cb(v1[i]);
        i++;
    }
    while (j < m) {
        cb(v2[j]);
        j++;
    }
}

module.exports = {
    indexOf,
    indexAtOrBelow,
    symDiff,
};
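
The binary searches assume sorted key arrays; symDiff then emits the values whose keys appear on exactly one side:

const { symDiff, indexAtOrBelow } = require('./ArrayUtils');

const out = [];
symDiff(['a', 'b', 'd'], ['b', 'c'],
    ['A', 'B', 'D'], ['B2', 'C'],
    v => out.push(v));
// out => ['A', 'C', 'D'] (keys 'a', 'c' and 'd' are unique to one side)

indexAtOrBelow([10, 20, 30], 25); // => 1, the index of 20, the largest key <= 25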
@ -1,51 +0,0 @@
const ArrayUtils = require('./ArrayUtils');

class SortedSet {
    constructor(obj) {
        if (obj) {
            this.keys = obj.keys;
            this.values = obj.values;
        } else {
            this.clear();
        }
    }

    clear() {
        this.keys = [];
        this.values = [];
    }

    get size() {
        return this.keys.length;
    }

    set(key, value) {
        const index = ArrayUtils.indexAtOrBelow(this.keys, key);
        if (this.keys[index] === key) {
            this.values[index] = value;
            return;
        }
        this.keys.splice(index + 1, 0, key);
        this.values.splice(index + 1, 0, value);
    }

    isSet(key) {
        const index = ArrayUtils.indexOf(this.keys, key);
        return index >= 0;
    }

    get(key) {
        const index = ArrayUtils.indexOf(this.keys, key);
        return index >= 0 ? this.values[index] : undefined;
    }

    del(key) {
        const index = ArrayUtils.indexOf(this.keys, key);
        if (index >= 0) {
            this.keys.splice(index, 1);
            this.values.splice(index, 1);
        }
    }
}

module.exports = SortedSet;
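
SortedSet keeps the parallel key/value arrays ordered, so lookups are binary searches and inserts splice at the right position:

const SortedSet = require('./SortedSet');

const set = new SortedSet();
set.set('b', 2);
set.set('a', 1);   // inserted before 'b', keeping the keys sorted
set.set('b', 20);  // overwrites the existing entry for 'b'
set.get('b');      // => 20
set.isSet('c');    // => false
set.del('a');
set.size;          // => 1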
@ -1,106 +0,0 @@
const stream = require('stream');

class MergeStream extends stream.Readable {
    constructor(stream1, stream2, compare) {
        super({ objectMode: true });

        this._compare = compare;
        this._streams = [stream1, stream2];

        // peekItems elements represent the latest item consumed from
        // the respective input stream but not yet pushed. It can also
        // be one of the following special values:
        // - undefined: stream hasn't started emitting items
        // - null: EOF reached and no more item to peek
        this._peekItems = [undefined, undefined];
        this._streamEof = [false, false];
        this._streamToResume = null;

        stream1.on('data', item => this._onItem(stream1, item, 0, 1));
        stream1.once('end', () => this._onEnd(stream1, 0, 1));
        stream1.once('error', err => this._onError(stream1, err, 0, 1));

        stream2.on('data', item => this._onItem(stream2, item, 1, 0));
        stream2.once('end', () => this._onEnd(stream2, 1, 0));
        stream2.once('error', err => this._onError(stream2, err, 1, 0));
    }

    _read() {
        if (this._streamToResume) {
            this._streamToResume.resume();
            this._streamToResume = null;
        }
    }

    _destroy(err, callback) {
        for (let i = 0; i < 2; ++i) {
            if (!this._streamEof[i]) {
                this._streams[i].destroy();
            }
        }
        callback();
    }

    _onItem(myStream, myItem, myIndex, otherIndex) {
        this._peekItems[myIndex] = myItem;
        const otherItem = this._peekItems[otherIndex];
        if (otherItem === undefined) {
            // wait for the other stream to wake up
            return myStream.pause();
        }
        if (otherItem === null || this._compare(myItem, otherItem) <= 0) {
            if (!this.push(myItem)) {
                myStream.pause();
                this._streamToResume = myStream;
            }
            return undefined;
        }
        const otherStream = this._streams[otherIndex];
        const otherMore = this.push(otherItem);
        if (this._streamEof[otherIndex]) {
            this._peekItems[otherIndex] = null;
            return this.push(myItem);
        }
        myStream.pause();
        if (otherMore) {
            return otherStream.resume();
        }
        this._streamToResume = otherStream;
        return undefined;
    }

    _onEnd(myStream, myIndex, otherIndex) {
        this._streamEof[myIndex] = true;
        if (this._peekItems[myIndex] === undefined) {
            this._peekItems[myIndex] = null;
        }
        const myItem = this._peekItems[myIndex];
        const otherItem = this._peekItems[otherIndex];
        if (otherItem === undefined) {
            // wait for the other stream to wake up
            return undefined;
        }
        if (otherItem === null) {
            return this.push(null);
        }
        if (myItem === null || this._compare(myItem, otherItem) <= 0) {
            this.push(otherItem);
            this._peekItems[myIndex] = null;
        }
        if (this._streamEof[otherIndex]) {
            return this.push(null);
        }
        const otherStream = this._streams[otherIndex];
        return otherStream.resume();
    }

    _onError(myStream, err, myIndex, otherIndex) {
        myStream.destroy();
        if (this._streams[otherIndex]) {
            this._streams[otherIndex].destroy();
        }
        this.emit('error', err);
    }
}

module.exports = MergeStream;
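
A minimal sketch of merging two already-sorted object streams into one sorted stream (Node's stream.Readable.from creates object-mode sources):

const stream = require('stream');
const MergeStream = require('./MergeStream');

const s1 = stream.Readable.from([1, 3, 5]);
const s2 = stream.Readable.from([2, 4]);
const merged = new MergeStream(s1, s2, (a, b) => a - b);
merged.on('data', n => console.log(n)); // 1, 2, 3, 4, 5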
@ -1,64 +0,0 @@
import * as constants from '../constants';

/**
 * Class containing requester's information received from Vault
 * @param {object} info from Vault including arn, canonicalID,
 * shortid, email, accountDisplayName and IAMdisplayName (if applicable)
 * @return {AuthInfo} an AuthInfo instance
 */
export default class AuthInfo {
    arn: string;
    canonicalID: string;
    shortid: string;
    email: string;
    accountDisplayName: string;
    IAMdisplayName: string;

    constructor(objectFromVault: any) {
        // amazon resource name for IAM user (if applicable)
        this.arn = objectFromVault.arn;
        // account canonicalID
        this.canonicalID = objectFromVault.canonicalID;
        // shortid for account (also contained in ARN)
        this.shortid = objectFromVault.shortid;
        // email for account or user as applicable
        this.email = objectFromVault.email;
        // display name for account
        this.accountDisplayName = objectFromVault.accountDisplayName;
        // display name for user (if applicable)
        this.IAMdisplayName = objectFromVault.IAMdisplayName;
    }
    getArn() {
        return this.arn;
    }
    getCanonicalID() {
        return this.canonicalID;
    }
    getShortid() {
        return this.shortid;
    }
    getEmail() {
        return this.email;
    }
    getAccountDisplayName() {
        return this.accountDisplayName;
    }
    getIAMdisplayName() {
        return this.IAMdisplayName;
    }
    // Check whether requester is an IAM user versus an account
    isRequesterAnIAMUser() {
        return !!this.IAMdisplayName;
    }
    isRequesterPublicUser() {
        return this.canonicalID === constants.publicId;
    }
    isRequesterAServiceAccount() {
        return this.canonicalID.startsWith(
            `${constants.zenkoServiceAccount}/`);
    }
    isRequesterThisServiceAccount(serviceName: string) {
        const computedCanonicalID = `${constants.zenkoServiceAccount}/${serviceName}`;
        return this.canonicalID === computedCanonicalID;
    }
}
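
A hypothetical AuthInfo built from a Vault response body (all field values below are illustrative, not real identifiers):

const AuthInfo = require('./AuthInfo').default;

const authInfo = new AuthInfo({
    arn: 'arn:aws:iam::123456789012:user/alice',
    canonicalID: 'abcdef0123456789',
    shortid: '123456789012',
    email: 'alice@example.com',
    accountDisplayName: 'acme',
    IAMdisplayName: 'alice',
});
authInfo.isRequesterAnIAMUser();  // true: IAMdisplayName is set
authInfo.getAccountDisplayName(); // 'acme'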
@ -1,404 +0,0 @@
import { Logger } from 'werelogs';
import errors from '../errors';
import AuthInfo from './AuthInfo';

/** vaultSignatureCb parses the message from Vault and instantiates AuthInfo
 * @param err - error from vault
 * @param authInfo - info from vault
 * @param log - log for request
 * @param callback - callback to authCheck functions
 * @param [streamingV4Params] - present if v4 signature;
 * items used to calculate signature on chunks if streaming auth
 */
function vaultSignatureCb(
    err: Error | null,
    authInfo: { message: { body: any } },
    log: Logger,
    callback: (err: Error | null, data?: any, results?: any, params?: any, infos?: any) => void,
    streamingV4Params?: any
) {
    // vaultclient API guarantees that it returns:
    // - either `err`, an Error object with `code` and `message` properties set
    // - or `err == null` and `info` is an object with `message.code` and
    //   `message.message` properties set.
    if (err) {
        log.debug('received error message from auth provider',
            { errorMessage: err });
        return callback(err);
    }
    log.debug('received info from Vault', { authInfo });
    const info = authInfo.message.body;
    const userInfo = new AuthInfo(info.userInfo);
    const authorizationResults = info.authorizationResults;
    const auditLog: { accountDisplayName: string, IAMdisplayName?: string } =
        { accountDisplayName: userInfo.getAccountDisplayName() };
    const iamDisplayName = userInfo.getIAMdisplayName();
    if (iamDisplayName) {
        auditLog.IAMdisplayName = iamDisplayName;
    }
    // @ts-ignore
    log.addDefaultFields(auditLog);
    return callback(null, userInfo, authorizationResults, streamingV4Params, {
        accountQuota: info.accountQuota || {},
    });
}

export type AuthV4RequestParams = {
    version: 4;
    log: Logger;
    data: {
        accessKey: string;
        signatureFromRequest: string;
        region: string;
        stringToSign: string;
        scopeDate: string;
        authType: 'query' | 'header';
        signatureVersion: string;
        signatureAge?: number;
        timestamp: number;
        credentialScope: string;
        securityToken: string;
        algo: string;
        log: Logger;
    };
};

/**
 * Class that provides common authentication methods against different
 * authentication backends.
 * @class Vault
 */
export default class Vault {
    client: any;
    implName: string;

    /**
     * @constructor
     * @param {object} client - authentication backend or vault client
     * @param {string} implName - implementation name for auth backend
     */
    constructor(client: any, implName: string) {
        this.client = client;
        this.implName = implName;
    }
    /**
     * authenticateV2Request
     *
     * @param params - the authentication parameters as returned by
     * auth.extractParams
     * @param params.version - shall equal 2
     * @param params.data.accessKey - the user's accessKey
     * @param params.data.signatureFromRequest - the signature read
     * from the request
     * @param params.data.stringToSign - the stringToSign
     * @param params.data.algo - the hashing algorithm used for the
     * signature
     * @param params.data.authType - the type of authentication (query
     * or header)
     * @param params.data.signatureVersion - the version of the
     * signature (AWS or AWS4)
     * @param [params.data.signatureAge] - the age of the signature in
     * ms
     * @param params.data.log - the logger object
     * @param {RequestContext []} requestContexts - an array of RequestContext
     * instances which contain information for policy authorization check
     * @param callback - callback with either error or user info
     */
    authenticateV2Request(
        params: {
            version: 2;
            log: Logger;
            data: {
                securityToken: string;
                accessKey: string;
                signatureFromRequest: string;
                stringToSign: string;
                algo: string;
                authType: 'query' | 'header';
                signatureVersion: string;
                signatureAge?: number;
                log: Logger;
            };
        },
        requestContexts: any[],
        callback: (err: Error | null, data?: any) => void
    ) {
        params.log.debug('authenticating V2 request');
        let serializedRCsArr: any;
        if (requestContexts) {
            serializedRCsArr = requestContexts.map(rc => rc.serialize());
        }
        this.client.verifySignatureV2(
            params.data.stringToSign,
            params.data.signatureFromRequest,
            params.data.accessKey,
            {
                algo: params.data.algo,
                // @ts-ignore
                reqUid: params.log.getSerializedUids(),
                logger: params.log,
                securityToken: params.data.securityToken,
                requestContext: serializedRCsArr,
            },
            (err: Error | null, userInfo?: any) => vaultSignatureCb(err, userInfo,
                params.log, callback),
        );
    }

    /** authenticateV4Request
     * @param params - the authentication parameters as returned by
     * auth.extractParams
     * @param params.version - shall equal 4
     * @param params.data.log - the logger object
     * @param params.data.accessKey - the user's accessKey
     * @param params.data.signatureFromRequest - the signature read
     * from the request
     * @param params.data.region - the AWS region
     * @param params.data.stringToSign - the stringToSign
     * @param params.data.scopeDate - the timespan to allow the request
     * @param params.data.authType - the type of authentication (query
     * or header)
     * @param params.data.signatureVersion - the version of the
     * signature (AWS or AWS4)
     * @param params.data.signatureAge - the age of the signature in ms
     * @param params.data.timestamp - signature timestamp
     * @param params.credentialScope - credentialScope for signature
     * @param {RequestContext [] | null} requestContexts - an array of
     * RequestContext instances which contain information for the policy
     * authorization check, or null if authenticating a chunk in
     * streaming v4 auth
     * @param callback - callback with either error or user info
     */
    authenticateV4Request(
        params: AuthV4RequestParams,
        requestContexts: any[] | null,
        callback: (err: Error | null, data?: any) => void
    ) {
        params.log.debug('authenticating V4 request');
        let serializedRCs: any;
        if (requestContexts) {
            serializedRCs = requestContexts.map(rc => rc.serialize());
        }
        const streamingV4Params = {
            accessKey: params.data.accessKey,
            signatureFromRequest: params.data.signatureFromRequest,
            region: params.data.region,
            scopeDate: params.data.scopeDate,
            timestamp: params.data.timestamp,
            credentialScope: params.data.credentialScope };
        this.client.verifySignatureV4(
            params.data.stringToSign,
            params.data.signatureFromRequest,
            params.data.accessKey,
            params.data.region,
            params.data.scopeDate,
            {
                // @ts-ignore
                reqUid: params.log.getSerializedUids(),
                logger: params.log,
                securityToken: params.data.securityToken,
                requestContext: serializedRCs,
            },
            (err: Error | null, userInfo?: any) => vaultSignatureCb(err, userInfo,
                params.log, callback, streamingV4Params),
        );
    }

    /** getCanonicalIds -- call Vault to get canonicalIDs based on email
     * addresses
     * @param emailAddresses - list of emailAddresses
     * @param log - log object
     * @param callback - callback with either error or an array
     * of objects with each object containing the canonicalID and emailAddress
     * of an account as properties
     */
    getCanonicalIds(
        emailAddresses: string[],
        log: Logger,
        callback: (
            err: Error | null,
            data?: { canonicalID: string; email: string }[]
        ) => void
    ) {
        log.trace('getting canonicalIDs from Vault based on emailAddresses',
            { emailAddresses });
        this.client.getCanonicalIds(emailAddresses,
            // @ts-ignore
            { reqUid: log.getSerializedUids() },
            (err: Error | null, info?: any) => {
                if (err) {
                    log.debug('received error message from auth provider',
                        { errorMessage: err });
                    return callback(err);
                }
                const infoFromVault = info.message.body;
                log.trace('info received from vault', { infoFromVault });
                const foundIds: { canonicalID: string; email: string }[] = [];
                for (let i = 0; i < Object.keys(infoFromVault).length; i++) {
                    const key = Object.keys(infoFromVault)[i];
                    if (infoFromVault[key] === 'WrongFormat'
                        || infoFromVault[key] === 'NotFound') {
                        return callback(errors.UnresolvableGrantByEmailAddress);
                    }
                    foundIds.push({
                        email: key,
                        canonicalID: infoFromVault[key],
                    });
                }
                return callback(null, foundIds);
            });
    }

    /** getEmailAddresses -- call Vault to get email addresses based on
     * canonicalIDs
     * @param canonicalIDs - list of canonicalIDs
     * @param log - log object
     * @param callback - callback with either error or an object
     * with canonicalID keys and email address values
     */
    getEmailAddresses(
        canonicalIDs: string[],
        log: Logger,
        callback: (err: Error | null, data?: { [key: string]: any }) => void
    ) {
        log.trace('getting emailAddresses from Vault based on canonicalIDs',
            { canonicalIDs });
        this.client.getEmailAddresses(canonicalIDs,
            // @ts-ignore
            { reqUid: log.getSerializedUids() },
            (err: Error | null, info?: any) => {
                if (err) {
                    log.debug('received error message from vault',
                        { errorMessage: err });
                    return callback(err);
                }
                const infoFromVault = info.message.body;
                log.trace('info received from vault', { infoFromVault });
                const result = {};
                /* If the email address was not found in Vault, do not
                   send the canonicalID back to the API */
                Object.keys(infoFromVault).forEach(key => {
                    if (infoFromVault[key] !== 'NotFound' &&
                        infoFromVault[key] !== 'WrongFormat') {
                        result[key] = infoFromVault[key];
                    }
                });
                return callback(null, result);
            });
    }

    /** getAccountIds -- call Vault to get accountIds based on
     * canonicalIDs
     * @param canonicalIDs - list of canonicalIDs
     * @param log - log object
     * @param callback - callback with either error or an object
     * with canonicalID keys and accountId values
     */
    getAccountIds(
        canonicalIDs: string[],
        log: Logger,
        callback: (err: Error | null, data?: { [key: string]: string }) => void
    ) {
        log.trace('getting accountIds from Vault based on canonicalIDs',
            { canonicalIDs });
        this.client.getAccountIds(canonicalIDs,
            // @ts-expect-error
            { reqUid: log.getSerializedUids() },
            (err: Error | null, info?: any) => {
                if (err) {
                    log.debug('received error message from vault',
                        { errorMessage: err });
                    return callback(err);
                }
                const infoFromVault = info.message.body;
                log.trace('info received from vault', { infoFromVault });
                const result = {};
                /* If the accountId was not found in Vault, do not
                   send the canonicalID back to the API */
                Object.keys(infoFromVault).forEach(key => {
                    if (infoFromVault[key] !== 'NotFound' &&
                        infoFromVault[key] !== 'WrongFormat') {
                        result[key] = infoFromVault[key];
                    }
                });
                return callback(null, result);
            });
    }

    /** checkPolicies -- call Vault to evaluate policies
     * @param {object} requestContextParams - parameters needed to construct
     * requestContext in Vault
     * @param {object} requestContextParams.constantParams - params that have
     * the same value for each requestContext to be constructed in Vault
     * @param {object} requestContextParams.parameterize - params that have
     * arrays as values since a requestContext needs to be constructed with
     * each option in Vault
     * @param {string} userArn - arn of requesting user
     * @param {object} log - log object
     * @param {function} callback - callback with either error or an array
     * of authorization results
     */
    checkPolicies(
        requestContextParams: any[],
        userArn: string,
        log: Logger,
        callback: (err: Error | null, data?: any[]) => void
    ) {
        log.trace('sending request context params to vault to evaluate policies');
        this.client.checkPolicies(requestContextParams, userArn, {
// @ts-ignore
|
|
||||||
reqUid: log.getSerializedUids(),
|
|
||||||
}, (err: Error | null, info?: any) => {
|
|
||||||
if (err) {
|
|
||||||
log.debug('received error message from auth provider',
|
|
||||||
{ error: err });
|
|
||||||
return callback(err);
|
|
||||||
}
|
|
||||||
const result = info.message.body;
|
|
||||||
return callback(null, result);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
checkHealth(log: Logger, callback: (err: Error | null, data?: any) => void) {
|
|
||||||
if (!this.client.healthcheck) {
|
|
||||||
const defResp = {};
|
|
||||||
defResp[this.implName] = { code: 200, message: 'OK' };
|
|
||||||
return callback(null, defResp);
|
|
||||||
}
|
|
||||||
// @ts-ignore
|
|
||||||
return this.client.healthcheck(log.getSerializedUids(), (err: Error | null, obj?: any) => {
|
|
||||||
const respBody = {};
|
|
||||||
if (err) {
|
|
||||||
log.debug(`error from ${this.implName}`, { error: err });
|
|
||||||
respBody[this.implName] = {
|
|
||||||
error: err,
|
|
||||||
};
|
|
||||||
// error returned as null so async parallel doesn't return
|
|
||||||
// before all backends are checked
|
|
||||||
return callback(null, respBody);
|
|
||||||
}
|
|
||||||
respBody[this.implName] = {
|
|
||||||
code: 200,
|
|
||||||
message: 'OK',
|
|
||||||
body: obj,
|
|
||||||
};
|
|
||||||
return callback(null, respBody);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
report(log: Logger, callback: (err: Error | null, data?: any) => void) {
|
|
||||||
// call the report function of the client
|
|
||||||
if (!this.client.report) {
|
|
||||||
return callback(null, {});
|
|
||||||
}
|
|
||||||
// @ts-ignore
|
|
||||||
return this.client.report(log.getSerializedUids(), (err: Error | null, obj?: any) => {
|
|
||||||
if (err) {
|
|
||||||
log.debug(`error from ${this.implName}`, { error: err });
|
|
||||||
return callback(err);
|
|
||||||
}
|
|
||||||
return callback(null, obj);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
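These lookup helpers all follow one pattern: forward the call to the wrapped client with a serialized request UID for log correlation, then normalize `info.message.body` before invoking the callback. A minimal caller sketch, assuming a `vault` instance and a werelogs `log` object already exist (both names illustrative):

    // Hypothetical usage; `vault` and `log` are assumed to be set up already.
    vault.getCanonicalIds(['dev@example.com'], log, (err, ids) => {
        if (err) {
            // errors.UnresolvableGrantByEmailAddress surfaces here whenever
            // Vault answers 'NotFound' or 'WrongFormat' for any address
            return;
        }
        ids?.forEach(({ email, canonicalID }) =>
            log.info(`${email} -> ${canonicalID}`));
    });

Note that `checkHealth` deliberately reports backend failures through its data argument rather than the error argument, so callers probing several backends in parallel still collect every result.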
lib/auth/auth.ts (265 lines removed)
@ -1,265 +0,0 @@
import * as crypto from 'crypto';
import { Logger } from 'werelogs';
import errors from '../errors';
import * as queryString from 'querystring';
import AuthInfo from './AuthInfo';
import * as v2 from './v2/authV2';
import * as v4 from './v4/authV4';
import * as constants from '../constants';
import constructStringToSignV2 from './v2/constructStringToSign';
import constructStringToSignV4 from './v4/constructStringToSign';
import { convertUTCtoISO8601 } from './v4/timeUtils';
import * as vaultUtilities from './backends/in_memory/vaultUtilities';
import * as inMemoryBackend from './backends/in_memory/Backend';
import baseBackend from './backends/base';
import chainBackend from './backends/ChainBackend';
import validateAuthConfig from './backends/in_memory/validateAuthConfig';
import AuthLoader from './backends/in_memory/AuthLoader';
import Vault from './Vault';

let vault: Vault | null = null;
const auth = {};
const checkFunctions = {
    v2: {
        headers: v2.header.check,
        query: v2.query.check,
    },
    v4: {
        headers: v4.header.check,
        query: v4.query.check,
    },
};

// If no auth information is provided in request, then user is part of
// 'All Users Group' so use this group as the canonicalID for the publicUser
const publicUserInfo = new AuthInfo({ canonicalID: constants.publicId });

function setAuthHandler(handler: Vault) {
    vault = handler;
    return auth;
}

/**
 * This function will check validity of request parameters to authenticate
 *
 * @param request - Http request object
 * @param log - Logger object
 * @param awsService - related AWS service
 * @param data - Parameters from queryString parsing or body of
 * POST request
 *
 * @return ret
 * @return ret.err - arsenal.errors object if any error was found
 * @return ret.params - auth parameters to use later on for signature
 * computation and check
 * @return ret.params.version - the auth scheme version
 * (undefined, 2, 4)
 * @return ret.params.data - the auth scheme's specific data
 */
function extractParams(
    request: any,
    log: Logger,
    awsService: string,
    data: { [key: string]: string }
) {
    log.trace('entered', { method: 'Arsenal.auth.server.extractParams' });
    const authHeader = request.headers.authorization;
    let version: 'v2' | 'v4' | null = null;
    let method: 'query' | 'headers' | null = null;

    // Identify auth version and method to dispatch to the right check function
    if (authHeader) {
        method = 'headers';
        // TODO: Check for security token header to handle temporary security
        // credentials
        if (authHeader.startsWith('AWS ')) {
            version = 'v2';
        } else if (authHeader.startsWith('AWS4')) {
            version = 'v4';
        } else {
            log.trace('invalid authorization security header',
                { header: authHeader });
            return { err: errors.AccessDenied };
        }
    } else if (data.Signature) {
        method = 'query';
        version = 'v2';
    } else if (data['X-Amz-Algorithm']) {
        method = 'query';
        version = 'v4';
    }

    // Here, either both values are set, or none is set
    if (version !== null && method !== null) {
        if (!checkFunctions[version] || !checkFunctions[version][method]) {
            log.trace('invalid auth version or method',
                { version, authMethod: method });
            return { err: errors.NotImplemented };
        }
        log.trace('identified auth method', { version, authMethod: method });
        return checkFunctions[version][method](request, log, data, awsService);
    }

    // no auth info identified
    log.debug('assuming public user');
    return { err: null, params: publicUserInfo };
}

/**
 * This function will check validity of request parameters to authenticate
 *
 * @param request - Http request object
 * @param log - Logger object
 * @param cb - the callback
 * @param awsService - related AWS service
 * @param {RequestContext[] | null} requestContexts - array of RequestContext
 * or null if no requestContexts to be sent to Vault (for instance,
 * in multi-object delete request)
 */
function doAuth(
    request: any,
    log: Logger,
    cb: (err: Error | null, data?: any) => void,
    awsService: string,
    requestContexts: any[] | null
) {
    const res = extractParams(request, log, awsService, request.query);
    if (res.err) {
        return cb(res.err);
    } else if (res.params instanceof AuthInfo) {
        return cb(null, res.params);
    }
    if (requestContexts) {
        requestContexts.forEach((requestContext) => {
            const { params } = res;
            if ('data' in params) {
                const { data } = params;
                requestContext.setAuthType(data.authType);
                requestContext.setSignatureVersion(data.signatureVersion);
                requestContext.setSecurityToken(data.securityToken);
                if ('signatureAge' in data) {
                    requestContext.setSignatureAge(data.signatureAge);
                }
            }
        });
    }

    // Corner cases managed, we're left with normal auth
    // TODO What's happening here?
    // @ts-ignore
    res.params.log = log;
    if (res.params.version === 2) {
        // @ts-ignore
        return vault!.authenticateV2Request(res.params, requestContexts, cb);
    }
    if (res.params.version === 4) {
        // @ts-ignore
        return vault!.authenticateV4Request(res.params, requestContexts, cb);
    }

    log.error('authentication method not found', {
        method: 'Arsenal.auth.doAuth',
    });
    return cb(errors.InternalError);
}

/**
 * This function will generate a version 4 content-md5 header
 * It looks at the request path to determine what kind of header encoding is required
 *
 * @param path - the request path
 * @param payload - the request payload to hash
 */
function generateContentMD5Header(
    path: string,
    payload: string,
) {
    const encoding = path && path.startsWith('/_/backbeat/') ? 'hex' : 'base64';
    return crypto.createHash('md5').update(payload, 'binary').digest(encoding);
}

/**
 * This function will generate a version 4 header
 *
 * @param request - Http request object
 * @param data - Parameters from queryString parsing or body of
 * POST request
 * @param accessKey - the accessKey
 * @param secretKeyValue - the secretKey
 * @param awsService - related AWS service
 * @param [proxyPath] - path that gets proxied by reverse proxy
 * @param [sessionToken] - security token if the access/secret keys
 * are temporary credentials from STS
 * @param [payload] - body of the request if any
 */
function generateV4Headers(
    request: any,
    data: { [key: string]: string },
    accessKey: string,
    secretKeyValue: string,
    awsService: string,
    proxyPath?: string,
    sessionToken?: string,
    payload?: string,
) {
    Object.assign(request, { headers: {} });
    const amzDate = convertUTCtoISO8601(Date.now());
    // get date without time
    const scopeDate = amzDate.slice(0, amzDate.indexOf('T'));
    const region = 'us-east-1';
    const service = awsService || 'iam';
    const credentialScope =
        `${scopeDate}/${region}/${service}/aws4_request`;
    const timestamp = amzDate;
    const algorithm = 'AWS4-HMAC-SHA256';

    payload = payload || '';
    if (request.method === 'POST') {
        payload = queryString.stringify(data, undefined, undefined, {
            encodeURIComponent,
        });
    }
    const payloadChecksum = crypto.createHash('sha256')
        .update(payload, 'binary').digest('hex');
    request.setHeader('host', request._headers.host);
    request.setHeader('x-amz-date', amzDate);
    request.setHeader('x-amz-content-sha256', payloadChecksum);
    request.setHeader('content-md5', generateContentMD5Header(request.path, payload));

    if (sessionToken) {
        request.setHeader('x-amz-security-token', sessionToken);
    }

    Object.assign(request.headers, request._headers);
    const signedHeaders = Object.keys(request._headers)
        .filter(headerName =>
            headerName.startsWith('x-amz-')
            || headerName.startsWith('x-scal-')
            || headerName === 'content-md5'
            || headerName === 'host',
        ).sort().join(';');
    const params = { request, signedHeaders, payloadChecksum,
        credentialScope, timestamp, query: data,
        awsService: service, proxyPath };
    const stringToSign = constructStringToSignV4(params);
    const signingKey = vaultUtilities.calculateSigningKey(secretKeyValue,
        region,
        scopeDate,
        service);
    const signature = crypto.createHmac('sha256', signingKey)
        .update(stringToSign as string, 'binary').digest('hex');
    const authorizationHeader = `${algorithm} Credential=${accessKey}` +
        `/${credentialScope}, SignedHeaders=${signedHeaders}, ` +
        `Signature=${signature}`;
    request.setHeader('authorization', authorizationHeader);
    Object.assign(request, { headers: {} });
}

export const server = { extractParams, doAuth };
export const client = { generateV4Headers, constructStringToSignV2 };
export const inMemory = { backend: inMemoryBackend, validateAuthConfig, AuthLoader };
export const backends = { baseBackend, chainBackend };
export {
    setAuthHandler as setHandler,
    AuthInfo,
    Vault,
};
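A sketch of the intended call flow, assuming a `vault` handler was constructed elsewhere and `req` is an HTTP request object carrying `headers` plus a parsed `query` (the responder and handler names are hypothetical):

    import { server, setHandler } from './auth';

    setHandler(vault);
    server.doAuth(req, log, (err, authInfo) => {
        if (err) {
            return respondWithError(err);   // hypothetical error responder
        }
        // authInfo is an AuthInfo instance; it is the public user when the
        // request carried no auth information at all
        return handleRequest(authInfo);     // hypothetical request handler
    }, 's3', null);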
@ -1,233 +0,0 @@
import assert from 'assert';
import async from 'async';
import errors from '../../errors';
import BaseBackend from './base';

/**
 * Class that provides an authentication backend that will verify signatures
 * and retrieve emails and canonical ids associated with an account using a
 * given list of authentication backends and vault clients.
 *
 * @class ChainBackend
 */
export default class ChainBackend extends BaseBackend {
    _clients: any[];

    /**
     * @constructor
     * @param {string} service - service id
     * @param {object[]} clients - list of authentication backends or vault clients
     */
    constructor(service: string, clients: any[]) {
        super(service);

        assert(Array.isArray(clients) && clients.length > 0, 'invalid client list');
        assert(clients.every(client =>
            typeof client.verifySignatureV4 === 'function' &&
            typeof client.verifySignatureV2 === 'function' &&
            typeof client.getCanonicalIds === 'function' &&
            typeof client.getEmailAddresses === 'function' &&
            typeof client.checkPolicies === 'function' &&
            typeof client.healthcheck === 'function',
        ), 'invalid client: missing required auth backend methods');
        this._clients = clients;
    }


    /*
     * try task against each client for one to be successful
     */
    _tryEachClient(task: any, cb: any) {
        // @ts-ignore
        async.tryEach(this._clients.map(client => done => task(client, done)), cb);
    }

    /*
     * apply task to all clients
     */
    _forEachClient(task: any, cb: any) {
        async.map(this._clients, task, cb);
    }

    verifySignatureV2(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        options: any,
        callback: any,
    ) {
        this._tryEachClient((client, done) => client.verifySignatureV2(
            stringToSign,
            signatureFromRequest,
            accessKey,
            options,
            done,
        ), callback);
    }

    verifySignatureV4(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        region: string,
        scopeDate: string,
        options: any,
        callback: any,
    ) {
        this._tryEachClient((client, done) => client.verifySignatureV4(
            stringToSign,
            signatureFromRequest,
            accessKey,
            region,
            scopeDate,
            options,
            done,
        ), callback);
    }

    static _mergeObjects(objectResponses: any) {
        return objectResponses.reduce(
            (retObj, resObj) => Object.assign(retObj, resObj.message.body),
            {});
    }

    getCanonicalIds(emailAddresses: string[], options: any, callback: any) {
        this._forEachClient(
            (client, done) => client.getCanonicalIds(emailAddresses, options, done),
            (err, res) => {
                if (err) {
                    return callback(err);
                }
                // TODO: atm naive merge, better handling of conflicting email results
                return callback(null, {
                    message: {
                        body: ChainBackend._mergeObjects(res),
                    },
                });
            });
    }

    getEmailAddresses(canonicalIDs: string[], options: any, callback: any) {
        this._forEachClient(
            (client, done) => client.getEmailAddresses(canonicalIDs, options, done),
            (err, res) => {
                if (err) {
                    return callback(err);
                }
                return callback(null, {
                    message: {
                        body: ChainBackend._mergeObjects(res),
                    },
                });
            });
    }

    /*
     * merge policy responses into a single message
     */
    static _mergePolicies(policyResponses: any) {
        const policyMap: any = {};

        policyResponses.forEach(resp => {
            if (!resp.message || !Array.isArray(resp.message.body)) {
                return;
            }

            const check = (policy) => {
                const key = (policy.arn || '') + (policy.versionId || '') + (policy.action || '');
                if (!policyMap[key] || !policyMap[key].isAllowed) {
                    policyMap[key] = policy;
                }
                // else is duplicate policy
            };

            resp.message.body.forEach(policy => {
                if (Array.isArray(policy)) {
                    policy.forEach(authResult => check(authResult));
                } else {
                    check(policy);
                }
            });
        });

        return Object.keys(policyMap).map(key => {
            const policyRes: any = { isAllowed: policyMap[key].isAllowed };
            if (policyMap[key].arn !== '') {
                policyRes.arn = policyMap[key].arn;
            }
            if (policyMap[key].versionId) {
                policyRes.versionId = policyMap[key].versionId;
            }
            if (policyMap[key].isImplicit !== undefined) {
                policyRes.isImplicit = policyMap[key].isImplicit;
            }
            if (policyMap[key].action) {
                policyRes.action = policyMap[key].action;
            }
            return policyRes;
        });
    }

    /*
    response format:
        { message: {
            body: [{}],
            code: number,
            message: string,
        } }
     */
    checkPolicies(requestContextParams: any, userArn: string, options: any, callback: any) {
        this._forEachClient((client, done) => client.checkPolicies(
            requestContextParams,
            userArn,
            options,
            done,
        ), (err, res) => {
            if (err) {
                return callback(err);
            }
            return callback(null, {
                message: {
                    body: ChainBackend._mergePolicies(res),
                },
            });
        });
    }

    healthcheck(reqUid: string, callback: any) {
        this._forEachClient((client, done) =>
            client.healthcheck(reqUid, (err, res) => done(null, {
                error: !!err ? err : null,
                status: res,
            }),
        ), (err, res) => {
            if (err) {
                return callback(err);
            }

            const isError = res.some(results => !!results.error);
            if (isError) {
                return callback(errors.InternalError, res);
            }
            return callback(null, res);
        });
    }

    report(reqUid: string, callback: any) {
        this._forEachClient((client, done) =>
            client.report(reqUid, done),
        (err, res) => {
            if (err) {
                return callback(err);
            }
            const mergedRes = res.reduce((acc, val) => {
                Object.keys(val).forEach(k => {
                    acc[k] = val[k];
                });
                return acc;
            }, {});

            return callback(null, mergedRes);
        });
    }
}
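Signature checks race through the clients with `async.tryEach`, so the first client to succeed wins, while the lookup methods fan out with `async.map` and merge every response. A construction sketch, assuming two clients that satisfy the interface assertions in the constructor:

    // `memBackend` and `remoteVaultClient` are assumed to implement the
    // full backend interface checked by the constructor's asserts.
    const chain = new ChainBackend('s3', [memBackend, remoteVaultClient]);
    chain.verifySignatureV2(stringToSign, signature, accessKey,
        { algo: 'sha256' },
        (err, userInfo) => {
            // err is set only if every client rejected the signature
        });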
@ -1,96 +0,0 @@
import errors from '../../errors';

/**
 * Base backend class
 *
 * @class BaseBackend
 */
export default class BaseBackend {
    service: string;

    /**
     * @constructor
     * @param {string} service - service identifier for constructing the ARN
     */
    constructor(service: string) {
        this.service = service;
    }

    /** verifySignatureV2
     * @param stringToSign - string to sign built per AWS rules
     * @param signatureFromRequest - signature sent with request
     * @param accessKey - account accessKey
     * @param options - contains algorithm (SHA1 or SHA256)
     * @param callback - callback with either error or user info
     * @return calls callback
     */
    verifySignatureV2(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        options: any,
        callback: any
    ) {
        return callback(errors.AuthMethodNotImplemented);
    }


    /** verifySignatureV4
     * @param stringToSign - string to sign built per AWS rules
     * @param signatureFromRequest - signature sent with request
     * @param accessKey - account accessKey
     * @param region - region specified in request credential
     * @param scopeDate - date specified in request credential
     * @param options - options to send to Vault
     * (just contains reqUid for logging in Vault)
     * @param callback - callback with either error or user info
     * @return calls callback
     */
    verifySignatureV4(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        region: string,
        scopeDate: string,
        options: any,
        callback: any
    ) {
        return callback(errors.AuthMethodNotImplemented);
    }

    /**
     * Gets canonical IDs for a list of accounts
     * based on email associated with account
     * @param emails - list of email addresses
     * @param options - to send log id to vault
     * @param callback - callback to calling function
     * @returns callback with either error or
     * object with email addresses as keys and canonical IDs
     * as values
     */
    getCanonicalIds(emails: string[], options: any, callback: any) {
        return callback(errors.AuthMethodNotImplemented);
    }

    /**
     * Gets email addresses (referred to as display names for getACLs)
     * for a list of accounts based on canonical IDs associated with account
     * @param canonicalIDs - list of canonicalIDs
     * @param options - to send log id to vault
     * @param callback - callback to calling function
     * @returns callback with either error or
     * an object from Vault containing account canonicalID
     * as each object key and an email address as the value (or "NotFound")
     */
    getEmailAddresses(canonicalIDs: string[], options: any, callback: any) {
        return callback(errors.AuthMethodNotImplemented);
    }

    checkPolicies(requestContextParams: any, userArn: string, options: any, callback: any) {
        return callback(null, { message: { body: [] } });
    }

    healthcheck(reqUid: string, callback: any) {
        return callback(null, { code: 200, message: 'OK' });
    }
}
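Since every method ships a safe default (AuthMethodNotImplemented, an empty policy body, a 200 healthcheck), a subclass only needs to override what it actually supports. A sketch of a minimal custom backend under that assumption:

    import errors from '../../errors';

    // Hypothetical backend that rejects every v2 signature outright.
    class DenyAllBackend extends BaseBackend {
        verifySignatureV2(
            stringToSign: string,
            signatureFromRequest: string,
            accessKey: string,
            options: any,
            callback: any,
        ) {
            return callback(errors.AccessDenied);
        }
    }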
@ -1,204 +0,0 @@
import * as fs from 'fs';
import glob from 'simple-glob';
import joi from 'joi';
import werelogs from 'werelogs';
import * as types from './types';
import { Account, Accounts } from './types';
import ARN from '../../../models/ARN';

/** Load authentication information from files or pre-loaded account objects */
export default class AuthLoader {
    #log: werelogs.Logger;
    #authData: Accounts;
    #isValid: 'waiting-for-validation' | 'valid' | 'invalid';

    constructor(logApi: { Logger: typeof werelogs.Logger } = werelogs) {
        this.#log = new logApi.Logger('S3');
        this.#authData = { accounts: [] };
        this.#isValid = 'waiting-for-validation';
    }

    /** Add one or more accounts to the authentication info */
    addAccounts(authData: Accounts, filePath?: string) {
        const isValid = this.#isAuthDataValid(authData, filePath);
        if (isValid) {
            this.#authData.accounts = [
                ...this.#authData.accounts,
                ...authData.accounts,
            ];
            // defer validity checking until the data is read, to avoid
            // logging the errors multiple times (we need to validate
            // all accounts at once to detect duplicate values)
            if (this.#isValid === 'valid') {
                this.#isValid = 'waiting-for-validation';
            }
        } else {
            this.#isValid = 'invalid';
        }
    }

    /**
     * Add account information from a file. Use { legacy: false } as an option
     * to use the new, Promise-based version.
     *
     * @param filePath - file path containing JSON
     * authentication info (see {@link addAccounts()} for format)
     */
    addFile(filePath: string, options: { legacy: false }): Promise<void>;
    /** @deprecated Please use Promise-version instead. */
    addFile(filePath: string, options?: { legacy: true }): void;
    addFile(filePath: string, options = { legacy: true }) {
        // On deprecation, remove the legacy part and keep the promises.
        const readFunc: any = options.legacy ? fs.readFileSync : fs.promises.readFile;
        const readResult = readFunc(filePath, 'utf8') as Promise<string> | string;
        const prom = Promise.resolve(readResult).then((data) => {
            const authData = JSON.parse(data);
            this.addAccounts(authData, filePath);
        });
        return options.legacy ? undefined : prom;
    }

    /**
     * Add account information from a filesystem path
     *
     * @param globPattern - filesystem glob pattern,
     * can be a single string or an array of glob patterns. Globs
     * can be simple file paths or can contain glob matching
     * characters, like '/a/b/*.json'. The matching files are
     * individually loaded as JSON and accounts are added. See
     * {@link addAccounts()} for JSON format.
     */
    addFilesByGlob(globPattern: string | string[]) {
        // FIXME switch glob to async version
        const files = glob(globPattern);
        files.forEach((filePath) => this.addFile(filePath));
    }

    /**
     * Perform validation on authentication info previously
     * loaded. Note that it has to be done on the entire set after an
     * update to catch duplicate account IDs or access keys.
     */
    validate() {
        if (this.#isValid === 'waiting-for-validation') {
            const isValid = this.#isAuthDataValid(this.#authData);
            this.#isValid = isValid ? 'valid' : 'invalid';
        }
        return this.#isValid === 'valid';
    }

    /**
     * Get authentication info as a plain JS object containing all accounts
     * under the "accounts" attribute, with validation.
     */
    get data() {
        return this.validate() ? this.#authData : null;
    }

    /** backward-compat: ignore the ARN if it starts with 'aws:' and log a warning */
    #isNotLegacyAWSARN(account: Account, filePath?: string) {
        if (account.arn.startsWith('aws:')) {
            const { name: accountName, arn: accountArn } = account;
            this.#log.error(
                'account must have a valid AWS ARN, legacy examples ' +
                "starting with 'aws:' are not supported anymore. " +
                'Please convert to a proper account entry (see ' +
                'examples at https://github.com/scality/S3/blob/' +
                'master/conf/authdata.json). Also note that support ' +
                'for account users has been dropped.',
                { accountName, accountArn, filePath }
            );
            return false;
        }
        return true;
    }

    #isValidUsers(account: Account, filePath?: string) {
        if (account.users) {
            const { name: accountName, arn: accountArn } = account;
            this.#log.error(
                'support for account users has been dropped, consider ' +
                'turning users into account entries (see examples at ' +
                'https://github.com/scality/S3/blob/master/conf/' +
                'authdata.json)',
                { accountName, accountArn, filePath }
            );
            return false;
        }
        return true;
    }

    #isValidARN(account: Account, filePath?: string) {
        const arnObj = ARN.createFromString(account.arn);
        const { name: accountName, arn: accountArn } = account;
        if (arnObj instanceof ARN) {
            if (!arnObj.isIAMAccount()) {
                this.#log.error('authentication config validation error', {
                    reason: 'not an IAM account ARN',
                    accountName,
                    accountArn,
                    filePath,
                });
                return false;
            }
        } else {
            this.#log.error('authentication config validation error', {
                reason: arnObj.error.description,
                accountName,
                accountArn,
                filePath,
            });
            return false;
        }
        return true;
    }

    #isAuthDataValid(authData: any, filePath?: string) {
        const options = { abortEarly: true };
        const response = types.validators.accounts.validate(authData, options);
        if (response.error) {
            this.#dumpJoiErrors(response.error.details, filePath);
            return false;
        }
        const validAccounts = response.value.accounts.filter(
            (account: Account) =>
                this.#isNotLegacyAWSARN(account, filePath) &&
                this.#isValidUsers(account, filePath) &&
                this.#isValidARN(account, filePath)
        );
        const areSomeInvalidAccounts =
            validAccounts.length !== response.value.accounts.length;
        if (areSomeInvalidAccounts) {
            return false;
        }
        const keys = validAccounts.flatMap((account) => account.keys);
        const uniqueKeysValidator = types.validators.keys.unique('access');
        const areKeysUnique = uniqueKeysValidator.validate(keys);
        if (areKeysUnique.error) {
            this.#dumpJoiErrors(areKeysUnique.error.details, filePath);
            return false;
        }
        return true;
    }

    #dumpJoiErrors(errors: joi.ValidationErrorItem[], filePath?: string) {
        errors.forEach((err) => {
            const baseLogInfo = { item: err.path, filePath };
            const logInfo = () => {
                if (err.type === 'array.unique') {
                    const reason = `duplicate value '${err.context?.path}'`;
                    const dupValue = err.context?.value[err.context.path];
                    return { ...baseLogInfo, reason, dupValue };
                } else {
                    const reason = err.message;
                    const context = err.context;
                    return { ...baseLogInfo, reason, context };
                }
            };
            this.#log.error(
                'authentication config validation error',
                logInfo()
            );
        });
    }
}
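A usage sketch; validation is deferred until `data` is read so that duplicate checks run once over the whole account set. The file path and glob are made up for illustration:

    const loader = new AuthLoader();
    loader.addFile('/conf/authdata.json');        // hypothetical path, legacy sync form
    loader.addFilesByGlob('/conf/extra/*.json');  // hypothetical glob
    if (!loader.validate()) {
        throw new Error('invalid authentication config');
    }
    const authData = loader.data;  // would be null had validation failed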
@ -1,194 +0,0 @@
import crypto from 'crypto';
import { Logger } from 'werelogs';
import errors from '../../../errors';
import { calculateSigningKey, hashSignature } from './vaultUtilities';
import Indexer from './Indexer';
import BaseBackend from '../base';
import { Accounts } from './types';

function _formatResponse(userInfoToSend: any) {
    return {
        message: {
            body: { userInfo: userInfoToSend },
        },
    };
}

/**
 * Class that provides a memory backend for verifying signatures and getting
 * emails and canonical ids associated with an account.
 *
 * @class InMemoryBackend
 */
class InMemoryBackend extends BaseBackend {
    indexer: Indexer;
    formatResponse: any;

    /**
     * @constructor
     * @param service - service identifier for constructing the ARN
     * @param indexer - indexer instance for retrieving account info
     * @param formatter - function which accepts user info to send
     * back and returns it in an object
     */
    constructor(service: string, indexer: Indexer, formatter: typeof _formatResponse) {
        super(service);
        this.indexer = indexer;
        this.formatResponse = formatter;
    }

    verifySignatureV2(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        options: any,
        callback: any,
    ) {
        const entity = this.indexer.getEntityByKey(accessKey);
        if (!entity) {
            return callback(errors.InvalidAccessKeyId);
        }
        const secretKey = this.indexer.getSecretKey(entity, accessKey);
        const reconstructedSig =
            hashSignature(stringToSign, secretKey, options.algo);
        if (signatureFromRequest !== reconstructedSig) {
            return callback(errors.SignatureDoesNotMatch);
        }
        const userInfoToSend = {
            accountDisplayName: this.indexer.getAcctDisplayName(entity),
            canonicalID: entity.canonicalID,
            arn: entity.arn,
            // @ts-ignore
            IAMdisplayName: entity.IAMdisplayName,
        };
        const vaultReturnObject = this.formatResponse(userInfoToSend);
        return callback(null, vaultReturnObject);
    }

    verifySignatureV4(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        region: string,
        scopeDate: string,
        options: any,
        callback: any,
    ) {
        const entity = this.indexer.getEntityByKey(accessKey);
        if (!entity) {
            return callback(errors.InvalidAccessKeyId);
        }
        const secretKey = this.indexer.getSecretKey(entity, accessKey);
        const signingKey = calculateSigningKey(secretKey, region, scopeDate);
        const reconstructedSig = crypto.createHmac('sha256', signingKey)
            .update(stringToSign, 'binary').digest('hex');
        if (signatureFromRequest !== reconstructedSig) {
            return callback(errors.SignatureDoesNotMatch);
        }
        const userInfoToSend = {
            accountDisplayName: this.indexer.getAcctDisplayName(entity),
            canonicalID: entity.canonicalID,
            arn: entity.arn,
            // @ts-ignore
            IAMdisplayName: entity.IAMdisplayName,
        };
        const vaultReturnObject = this.formatResponse(userInfoToSend);
        return callback(null, vaultReturnObject);
    }

    getCanonicalIds(emails: string[], log: Logger, cb: any) {
        const results = {};
        emails.forEach(email => {
            const lowercasedEmail = email.toLowerCase();
            const entity = this.indexer.getEntityByEmail(lowercasedEmail);
            if (!entity) {
                results[email] = 'NotFound';
            } else {
                results[email] = entity.canonicalID;
            }
        });
        const vaultReturnObject = {
            message: {
                body: results,
            },
        };
        return cb(null, vaultReturnObject);
    }

    getEmailAddresses(canonicalIDs: string[], options: any, cb: any) {
        const results = {};
        canonicalIDs.forEach(canonicalId => {
            const foundEntity = this.indexer.getEntityByCanId(canonicalId);
            if (!foundEntity || !foundEntity.email) {
                results[canonicalId] = 'NotFound';
            } else {
                results[canonicalId] = foundEntity.email;
            }
        });
        const vaultReturnObject = {
            message: {
                body: results,
            },
        };
        return cb(null, vaultReturnObject);
    }

    /**
     * Gets accountIds for a list of accounts based on
     * the canonical IDs associated with the account
     * @param canonicalIDs - list of canonicalIDs
     * @param options - to send log id to vault
     * @param cb - callback to calling function
     * @returns callback with either error or
     * an object from Vault containing account canonicalID
     * as each object key and an accountId as the value (or "NotFound")
     */
    getAccountIds(canonicalIDs: string[], options: any, cb: any) {
        const results = {};
        canonicalIDs.forEach(canonicalID => {
            const foundEntity = this.indexer.getEntityByCanId(canonicalID);
            if (!foundEntity || !foundEntity.shortid) {
                results[canonicalID] = 'Not Found';
            } else {
                results[canonicalID] = foundEntity.shortid;
            }
        });
        const vaultReturnObject = {
            message: {
                body: results,
            },
        };
        return cb(null, vaultReturnObject);
    }

    report(log: Logger, callback: any) {
        return callback(null, {});
    }
}


class S3AuthBackend extends InMemoryBackend {
    /**
     * @constructor
     * @param authdata - the authentication config file's data
     * @param authdata.accounts - array of account objects
     * @param authdata.accounts[].name - account name
     * @param authdata.accounts[].email - account email
     * @param authdata.accounts[].arn - IAM resource name
     * @param authdata.accounts[].canonicalID - account canonical ID
     * @param authdata.accounts[].shortid - short account ID
     * @param authdata.accounts[].keys - array of key objects
     * @param authdata.accounts[].keys[].access - access key
     * @param authdata.accounts[].keys[].secret - secret key
     */
    constructor(authdata?: Accounts) {
        super('s3', new Indexer(authdata), _formatResponse);
    }

    refreshAuthData(authData?: Accounts) {
        this.indexer = new Indexer(authData);
    }
}

export { S3AuthBackend as s3 }
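A sketch exercising the exported `s3` backend directly, assuming `authData` matches the shape documented on the constructor above (the access key and signature inputs are placeholders):

    import { s3 as S3AuthBackend } from './Backend';

    const backend = new S3AuthBackend(authData);
    backend.verifySignatureV2(stringToSign, signature, 'ACCESSKEY0EXAMPLE',
        { algo: 'sha256' },
        (err, res) => {
            // on success, res.message.body.userInfo mirrors a Vault response
        });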
@ -1,93 +0,0 @@
import { Accounts, Account, Entity } from './types';

/**
 * Class that provides an internal indexing over the simple data provided by
 * the authentication configuration file for the memory backend. This allows
 * accessing the different authentication entities through various types of
 * keys.
 */
export default class Indexer {
    accountsBy: {
        canId: { [id: string]: Entity | undefined },
        accessKey: { [id: string]: Entity | undefined },
        email: { [id: string]: Entity | undefined },
    }

    constructor(authdata?: Accounts) {
        this.accountsBy = {
            canId: {},
            accessKey: {},
            email: {},
        };

        /*
         * This may happen if the application is configured to use another
         * authentication backend than in-memory.
         * As such, we're managing the error here to avoid screwing up there.
         */
        if (!authdata) {
            return;
        }

        this.#build(authdata);
    }

    #indexAccount(account: Account) {
        const accountData: Entity = {
            arn: account.arn,
            canonicalID: account.canonicalID,
            shortid: account.shortid,
            accountDisplayName: account.name,
            email: account.email.toLowerCase(),
            keys: [],
        };
        this.accountsBy.canId[accountData.canonicalID] = accountData;
        this.accountsBy.email[accountData.email] = accountData;
        if (account.keys !== undefined) {
            account.keys.forEach(key => {
                accountData.keys.push(key);
                this.accountsBy.accessKey[key.access] = accountData;
            });
        }
    }

    #build(authdata: Accounts) {
        authdata.accounts.forEach(account => {
            this.#indexAccount(account);
        });
    }

    /** This method returns the account associated with a canonical ID. */
    getEntityByCanId(canId: string): Entity | undefined {
        return this.accountsBy.canId[canId];
    }

    /**
     * This method returns the entity (either an account or a user) associated
     * with an access key.
     * @param {string} key - The accessKey of the entity
     */
    getEntityByKey(key: string): Entity | undefined {
        return this.accountsBy.accessKey[key];
    }

    /**
     * This method returns the entity (either an account or a user) associated
     * with an email address.
     */
    getEntityByEmail(email: string): Entity | undefined {
        const lowerCasedEmail = email.toLowerCase();
        return this.accountsBy.email[lowerCasedEmail];
    }

    /** This method returns the secret key associated with the entity. */
    getSecretKey(entity: Entity, accessKey: string) {
        const keys = entity.keys.filter(kv => kv.access === accessKey);
        return keys[0].secret;
    }

    /** This method returns the account display name associated with the entity. */
    getAcctDisplayName(entity: Entity) {
        return entity.accountDisplayName;
    }
}
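A lookup sketch; note that `getSecretKey` assumes the access key is present on the entity, since it indexes into the filter result without a guard:

    const indexer = new Indexer(authData);      // authData assumed loaded
    const entity = indexer.getEntityByKey('ACCESSKEY0EXAMPLE');
    if (entity) {
        const secret = indexer.getSecretKey(entity, 'ACCESSKEY0EXAMPLE');
        const owner = indexer.getAcctDisplayName(entity);
    }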
@ -1,51 +0,0 @@
import joi from 'joi';

export type Callback<Data = any> = (err?: Error | null | undefined, data?: Data) => void;

export type Credentials = { access: string; secret: string };
export type Base = {
    arn: string;
    canonicalID: string;
    shortid: string;
    email: string;
    keys: Credentials[];
};
export type Account = Base & { name: string; users: any[] };
export type Accounts = { accounts: Account[] };
export type Entity = Base & { accountDisplayName: string };

const keys = ((): joi.ArraySchema => {
    const str = joi.string().required();
    const items = { access: str, secret: str };
    return joi.array().items(items).required();
})();

const account = (() => {
    return joi.object<Account>({
        name: joi.string().required(),
        email: joi.string().email().required(),
        arn: joi.string().required(),
        canonicalID: joi.string().required(),
        shortid: joi
            .string()
            .regex(/^[0-9]{12}$/)
            .required(),
        keys: keys,
        // backward-compat
        users: joi.array(),
    });
})();

const accounts = (() => {
    return joi.object<Accounts>({
        accounts: joi
            .array()
            .items(account)
            .required()
            .unique('arn')
            .unique('email')
            .unique('canonicalID'),
    });
})();

export const validators = { keys, account, accounts };
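A validation sketch against the exported schema; the candidate account below is entirely illustrative:

    const candidate = {
        accounts: [{
            name: 'test-account',
            email: 'test@example.com',
            arn: 'arn:aws:iam::123456789012:root',
            canonicalID: 'abcd'.repeat(16),
            shortid: '123456789012',
            keys: [{ access: 'ACCESSKEY0EXAMPLE', secret: 'SECRET0EXAMPLE' }],
        }],
    };
    const { error } = validators.accounts.validate(candidate);
    // error?.details is the list AuthLoader feeds to #dumpJoiErrors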
@ -1,16 +0,0 @@
import { Logger } from 'werelogs';
import AuthLoader from './AuthLoader';
import { Accounts } from './types';

/**
 * @deprecated please use {@link AuthLoader} class instead
 * @return true on erroneous data, false on success
 */
export default function validateAuthConfig(
    authdata: Accounts,
    logApi?: { Logger: typeof Logger }
) {
    const authLoader = new AuthLoader(logApi);
    authLoader.addAccounts(authdata);
    return !authLoader.validate();
}
@ -1,40 +0,0 @@
import * as crypto from 'crypto';

/** hashSignature for v2 Auth
 * @param {string} stringToSign - built string to sign per AWS rules
 * @param {string} secretKey - user's secretKey
 * @param {string} algorithm - either SHA256 or SHA1
 * @return {string} reconstructed signature
 */
export function hashSignature(
    stringToSign: string,
    secretKey: string,
    algorithm: 'SHA256' | 'SHA1'
): string {
    const hmacObject = crypto.createHmac(algorithm, secretKey);
    return hmacObject.update(stringToSign, 'binary').digest('base64');
}

const sha256Digest = (key: string | Buffer, data: string) => {
    return crypto.createHmac('sha256', key).update(data, 'binary').digest();
};

/** calculateSigningKey for v4 Auth
 * @param {string} secretKey - requester's secretKey
 * @param {string} region - region included in request
 * @param {string} scopeDate - scopeDate included in request
 * @param {string} [service] - to specify a service other than s3
 * @return {Buffer} signingKey - signingKey to calculate signature
 */
export function calculateSigningKey(
    secretKey: string,
    region: string,
    scopeDate: string,
    service?: string
): Buffer {
    const dateKey = sha256Digest(`AWS4${secretKey}`, scopeDate);
    const dateRegionKey = sha256Digest(dateKey, region);
    const dateRegionServiceKey = sha256Digest(dateRegionKey, service || 's3');
    const signingKey = sha256Digest(dateRegionServiceKey, 'aws4_request');
    return signingKey;
}
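`calculateSigningKey` implements the standard AWS v4 chain: HMAC('AWS4' + secret, scopeDate), then the region, the service, and the literal 'aws4_request'. A signing sketch, using the example secret key from the AWS documentation and assuming `stringToSign` was built elsewhere:

    import * as crypto from 'crypto';

    const signingKey = calculateSigningKey(
        'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY', 'us-east-1', '20240101');
    const signature = crypto.createHmac('sha256', signingKey)
        .update(stringToSign, 'binary').digest('hex');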
@ -1,16 +0,0 @@
export default function algoCheck(signatureLength: number) {
    let algo: 'sha256' | 'sha1';
    // A base64-encoded SHA-256 HMAC is 44 characters long and a SHA-1
    // HMAC is 28 characters, so the length of the signature identifies
    // the algorithm that was used
    const SHA256LEN = 44;
    const SHA1LEN = 28;
    if (signatureLength === SHA256LEN) {
        algo = 'sha256';
    }
    if (signatureLength === SHA1LEN) {
        algo = 'sha1';
    }
    // @ts-ignore
    return algo;
}
@ -1,2 +0,0 @@
export * as header from './headerAuthCheck';
export * as query from './queryAuthCheck';
@ -1,34 +0,0 @@
import { Logger } from 'werelogs';
import errors from '../../errors';

const epochTime = new Date('1970-01-01').getTime();

export default function checkRequestExpiry(timestamp: number, log: Logger) {
    // If the timestamp is before epochTime, the request is invalid and we
    // return errors.AccessDenied
    if (timestamp < epochTime) {
        log.debug('request time is invalid', { timestamp });
        return errors.AccessDenied;
    }
    // If the timestamp is more than 15 minutes in the past, or more than
    // 15 minutes in the future, the request has expired and we return
    // errors.RequestTimeTooSkewed
    const currentTime = Date.now();
    log.trace('request timestamp', { requestTimestamp: timestamp });
    log.trace('current timestamp', { currentTimestamp: currentTime });

    const fifteenMinutes = (15 * 60 * 1000);
    if (currentTime - timestamp > fifteenMinutes) {
        log.trace('request timestamp is not within 15 minutes of current time');
        log.debug('request time too skewed', { timestamp });
        return errors.RequestTimeTooSkewed;
    }

    if (currentTime + fifteenMinutes < timestamp) {
        log.trace('request timestamp is more than 15 minutes into future');
        log.debug('request time too skewed', { timestamp });
        return errors.RequestTimeTooSkewed;
    }

    return undefined;
}
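A gating sketch: run the skew check before any signature work, since it returns an arsenal error object (or `undefined`) instead of throwing; `requestTimestamp` and `callback` are assumed from the surrounding handler:

    const expiryError = checkRequestExpiry(requestTimestamp, log);
    if (expiryError) {
        // AccessDenied for pre-epoch timestamps,
        // RequestTimeTooSkewed beyond the 15-minute window
        return callback(expiryError);
    }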
@ -1,47 +0,0 @@
import { Logger } from 'werelogs';
import utf8 from 'utf8';
import getCanonicalizedAmzHeaders from './getCanonicalizedAmzHeaders';
import getCanonicalizedResource from './getCanonicalizedResource';

export default function constructStringToSign(
    request: any,
    data: { [key: string]: string },
    log: Logger,
    clientType?: any
) {
    /*
    Build signature per AWS requirements:
    StringToSign = HTTP-Verb + '\n' +
    Content-MD5 + '\n' +
    Content-Type + '\n' +
    Date (or Expiration for query Auth) + '\n' +
    CanonicalizedAmzHeaders +
    CanonicalizedResource;
    */
    log.trace('constructing string to sign');

    let stringToSign = `${request.method}\n`;
    const headers = request.headers;
    const query = data;

    const contentMD5 = headers['content-md5'] ?
        headers['content-md5'] : query['Content-MD5'];
    stringToSign += (contentMD5 ? `${contentMD5}\n` : '\n');

    const contentType = headers['content-type'] ?
        headers['content-type'] : query['Content-Type'];
    stringToSign += (contentType ? `${contentType}\n` : '\n');

    /*
    AWS docs are conflicting on whether to include the x-amz-date header
    here if present in the request.
    s3cmd includes x-amz-date in amzHeaders rather
    than here in stringToSign so we have replicated that.
    */
    const date = query.Expires ? query.Expires : headers.date;
    const combinedQueryHeaders = Object.assign({}, headers, query);
    stringToSign += (date ? `${date}\n` : '\n')
        + getCanonicalizedAmzHeaders(combinedQueryHeaders, clientType)
        + getCanonicalizedResource(request, clientType);
    return utf8.encode(stringToSign);
}
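For a bare GET with only a Date header and no amz headers, the assembled string reduces to the classic v2 layout (values illustrative):

    // GET\n
    // \n                                  <- no Content-MD5
    // \n                                  <- no Content-Type
    // Tue, 27 Mar 2007 19:36:42 +0000\n   <- Date (or Expires for query auth)
    // /mybucket/mykey                     <- canonicalized resource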
@ -1,43 +0,0 @@
export default function getCanonicalizedAmzHeaders(headers: Headers, clientType: string) {
    /*
    Iterate through the headers and pull out any that are x-amz headers.
    We need to include 'x-amz-date' here even though the AWS docs are
    ambiguous on this.
    */
    const filterFn = clientType === 'GCP' ?
        (val: string) => val.substr(0, 7) === 'x-goog-' :
        (val: string) => val.substr(0, 6) === 'x-amz-';
    const amzHeaders = Object.keys(headers)
        .filter(filterFn)
        .map(val => [val.trim(), headers[val].trim()]);
    /*
    AWS docs state that duplicate headers should be combined
    in the same header with values concatenated with
    a comma separation.
    Node combines duplicate headers and concatenates the values
    with a comma AND SPACE separation.
    Could replace all occurrences of ', ' with ',' but this
    would remove spaces that might be desired
    (for instance, in date header).
    Opted to proceed without this parsing since it does not appear
    that the AWS clients use duplicate headers.
    */

    // If there are no amz headers, just return an empty string
    if (amzHeaders.length === 0) {
        return '';
    }


    // Sort the amz headers by key (first item in tuple)
    amzHeaders.sort((a, b) => {
        if (a[0] > b[0]) {
            return 1;
        }
        return -1;
    });
    // Build headerString
    return amzHeaders.reduce((headerStr, current) =>
        `${headerStr}${current[0]}:${current[1]}\n`,
    '');
}
|
|
|
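As a sketch of the expected output, assuming the hypothetical headers below, the function returns one `name:value` pair per line, sorted by header name:

// Hypothetical input/output for getCanonicalizedAmzHeaders:
const canonical = getCanonicalizedAmzHeaders({
    'x-amz-date': 'Tue, 27 Mar 2007 19:36:42 +0000',
    'x-amz-acl': 'public-read',
} as any, 'aws');
// => 'x-amz-acl:public-read\nx-amz-date:Tue, 27 Mar 2007 19:36:42 +0000\n'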
@ -1,117 +0,0 @@
import * as url from 'url';

const gcpSubresources = [
    'acl',
    'billing',
    'compose',
    'cors',
    'encryption',
    'lifecycle',
    'location',
    'logging',
    'storageClass',
    'tagging',
    'upload_id',
    'versioning',
    'versions',
    'websiteConfig',
];

const awsSubresources = [
    'acl',
    'cors',
    'delete',
    'lifecycle',
    'location',
    'logging',
    'notification',
    'partNumber',
    'policy',
    'requestPayment',
    'tagging',
    'torrent',
    'uploadId',
    'uploads',
    'versionId',
    'versioning',
    'replication',
    'versions',
    'website',
];

export default function getCanonicalizedResource(request: any, clientType: string) {
    /*
    This variable is used to determine whether to insert a '?' or '&'.
    Once a query parameter is added to the resourceString, it changes to
    '&' before any new query parameter is added.
    */
    let queryChar = '?';
    // If the bucket was specified in the hostname, add it to the resourceString
    let resourceString = request.gotBucketNameFromHost ?
        `/${request.bucketName}` : '';
    // Add the path to the resourceString
    resourceString += url.parse(request.url).pathname;

    /*
    If the request includes a specified subresource, add to the
    resourceString: (a) a '?', (b) the subresource, and (c) its value
    (if any). Separate multiple subresources with '&'. Subresources must
    be in alphabetical order.
    */

    // Specified subresources:
    const subresources =
        clientType === 'GCP' ? gcpSubresources : awsSubresources;

    /*
    If the request includes query-string parameters that override the
    headers, include them in the resourceString along with their values.
    AWS is ambiguous about the format; we use alphabetical order.
    */
    const overridingParams = [
        'response-cache-control',
        'response-content-disposition',
        'response-content-encoding',
        'response-content-language',
        'response-content-type',
        'response-expires',
    ];

    // Check which specified subresources are present in the query string
    // and build an array with them
    const query = request.query;
    const presentSubresources = Object.keys(query).filter(val =>
        subresources.indexOf(val) !== -1);
    // Sort the array and add the subresources and their value (if any)
    // to the resourceString
    presentSubresources.sort();
    resourceString = presentSubresources.reduce((prev, current) => {
        const ch = (query[current] !== '' ? '=' : '');
        const ret = `${prev}${queryChar}${current}${ch}${query[current]}`;
        queryChar = '&';
        return ret;
    }, resourceString);
    // Add the overriding parameters to our resourceString
    resourceString = overridingParams.reduce((prev, current) => {
        if (query[current]) {
            const ret = `${prev}${queryChar}${current}=${query[current]}`;
            queryChar = '&';
            return ret;
        }
        return prev;
    }, resourceString);

    /*
    Per AWS, the delete query-string parameter must be included when
    you create the CanonicalizedResource for a multi-object Delete request.
    It is unclear what this means for a single-item delete request.
    */
    if (request.query.delete) {
        // Addresses adding '?' instead of '&' if no other params were added.
        resourceString += `${queryChar}delete=${query.delete}`;
    }
    return resourceString;
}
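A minimal sketch of the output, assuming the hypothetical path-style request below (bucket in the path, one valueless `acl` subresource):

// Hypothetical input/output for getCanonicalizedResource:
const resource = getCanonicalizedResource({
    gotBucketNameFromHost: false,
    url: '/mybucket/mykey?acl',
    query: { acl: '' },
} as any, 'aws');
// => '/mybucket/mykey?acl'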
@ -1,82 +0,0 @@
import { Logger } from 'werelogs';
import errors from '../../errors';
import * as constants from '../../constants';
import constructStringToSign from './constructStringToSign';
import checkRequestExpiry from './checkRequestExpiry';
import algoCheck from './algoCheck';

export function check(request: any, log: Logger, data: { [key: string]: string }) {
    log.trace('running header auth check');
    const headers = request.headers;

    const token = headers['x-amz-security-token'];
    if (token && !constants.iamSecurityToken.pattern.test(token)) {
        log.debug('invalid security token', { token });
        return { err: errors.InvalidToken };
    }

    // Check to make sure the timestamp is within 15 minutes of the current time
    let timestamp = headers['x-amz-date'] ?
        headers['x-amz-date'] : headers.date;
    timestamp = Date.parse(timestamp);
    if (!timestamp) {
        log.debug('missing or invalid date header',
            { method: 'auth/v2/headerAuthCheck.check' });
        return { err: errors.AccessDenied.
            customizeDescription('Authentication requires a valid Date or ' +
            'x-amz-date header') };
    }

    const err = checkRequestExpiry(timestamp, log);
    if (err) {
        return { err };
    }

    // The Authorization header should be
    // in the format 'AWS AccessKey:Signature'
    const authInfo = headers.authorization;

    if (!authInfo) {
        log.debug('missing authorization security header');
        return { err: errors.MissingSecurityHeader };
    }
    const colonIndex = authInfo.indexOf(':');
    if (colonIndex < 0) {
        log.debug('invalid authorization header', { authInfo });
        return { err: errors.InvalidArgument };
    }
    const accessKey = colonIndex > 4 ?
        authInfo.substring(4, colonIndex).trim() : undefined;
    if (typeof accessKey !== 'string' || accessKey.length === 0) {
        log.trace('invalid authorization header', { authInfo });
        return { err: errors.MissingSecurityHeader };
    }
    // @ts-ignore
    log.addDefaultFields({ accessKey });

    const signatureFromRequest = authInfo.substring(colonIndex + 1).trim();
    log.trace('signature from request', { signatureFromRequest });
    const stringToSign = constructStringToSign(request, data, log);
    log.trace('constructed string to sign', { stringToSign });
    const algo = algoCheck(signatureFromRequest.length);
    log.trace('algo for calculating signature', { algo });
    if (algo === undefined) {
        return { err: errors.InvalidArgument };
    }
    return {
        err: null,
        params: {
            version: 2,
            data: {
                accessKey,
                signatureFromRequest,
                stringToSign,
                algo,
                authType: 'REST-HEADER',
                signatureVersion: 'AWS',
                signatureAge: Date.now() - timestamp,
                securityToken: token,
            },
        },
    };
}
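For reference, a hypothetical v2 Authorization header and the slices the code above takes from it (the access key and signature values are illustrative):

const authInfo = 'AWS AKIDEXAMPLE:frJIUN8DYpKDtOLCwo//yllqDzg=';
const colonIndex = authInfo.indexOf(':');             // 15
const accessKey = authInfo.substring(4, colonIndex);  // 'AKIDEXAMPLE'
const signature = authInfo.substring(colonIndex + 1); // base64 HMAC-SHA1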
@ -1,84 +0,0 @@
import { Logger } from 'werelogs';
import errors from '../../errors';
import * as constants from '../../constants';
import algoCheck from './algoCheck';
import constructStringToSign from './constructStringToSign';

export function check(request: any, log: Logger, data: { [key: string]: string }) {
    log.trace('running query auth check');
    if (request.method === 'POST') {
        log.debug('query string auth not supported for post requests');
        return { err: errors.NotImplemented };
    }

    const token = data.SecurityToken;
    if (token && !constants.iamSecurityToken.pattern.test(token)) {
        log.debug('invalid security token', { token });
        return { err: errors.InvalidToken };
    }

    /*
    Check whether the request has expired or whether the Expires parameter
    is more than 604800000 milliseconds (7 days) in the future.
    The Expires time is provided in seconds, so it must be multiplied by
    1000 to obtain milliseconds for comparison with Date.now().
    */
    const expirationTime = parseInt(data.Expires, 10) * 1000;
    if (Number.isNaN(expirationTime)) {
        log.debug('invalid expires parameter',
            { expires: data.Expires });
        return { err: errors.MissingSecurityHeader };
    }

    const currentTime = Date.now();

    const preSignedURLExpiry = process.env.PRE_SIGN_URL_EXPIRY
        && !Number.isNaN(process.env.PRE_SIGN_URL_EXPIRY)
        ? Number.parseInt(process.env.PRE_SIGN_URL_EXPIRY, 10)
        : constants.defaultPreSignedURLExpiry * 1000;

    if (expirationTime > currentTime + preSignedURLExpiry) {
        log.debug('expires parameter too far in future',
            { expires: request.query.Expires });
        return { err: errors.AccessDenied };
    }
    if (currentTime > expirationTime) {
        log.debug('current time exceeds expires time',
            { expires: request.query.Expires });
        return { err: errors.RequestTimeTooSkewed };
    }
    const accessKey = data.AWSAccessKeyId;
    // @ts-ignore
    log.addDefaultFields({ accessKey });

    const signatureFromRequest = decodeURIComponent(data.Signature);
    log.trace('signature from request', { signatureFromRequest });
    if (!accessKey || !signatureFromRequest) {
        log.debug('invalid access key/signature parameters');
        return { err: errors.MissingSecurityHeader };
    }
    const stringToSign = constructStringToSign(request, data, log);
    log.trace('constructed string to sign', { stringToSign });
    const algo = algoCheck(signatureFromRequest.length);
    log.trace('algo for calculating signature', { algo });
    if (algo === undefined) {
        return { err: errors.InvalidArgument };
    }
    return {
        err: null,
        params: {
            version: 2,
            data: {
                accessKey,
                signatureFromRequest,
                stringToSign,
                algo,
                authType: 'REST-QUERY-STRING',
                signatureVersion: 'AWS',
                securityToken: token,
            },
        },
    };
}
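A sketch of the query parameters a v2 presigned URL carries into this check (all values are hypothetical placeholders):

const data = {
    AWSAccessKeyId: 'AKIDEXAMPLE',
    Expires: '1454450650',                          // unix seconds; multiplied by 1000 above
    Signature: '<URI-encoded base64 HMAC-SHA1>',    // placeholder; decoded above
};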
@ -1,2 +0,0 @@
export * as header from './headerAuthCheck';
export * as query from './queryAuthCheck';
@ -1,81 +0,0 @@
/*
AWS's URI encoding rules:
URI encode every byte. Uri-Encode() must enforce the following rules:

    URI encode every byte except the unreserved characters:
    'A'-'Z', 'a'-'z', '0'-'9', '-', '.', '_', and '~'.
    The space character is a reserved character and must be
    encoded as "%20" (and not as "+").
    Each Uri-encoded byte is formed by a '%' and the two-digit
    hexadecimal value of the byte.
    Letters in the hexadecimal value must be uppercase, for example "%1A".
    Encode the forward slash character, '/',
    everywhere except in the object key name.
    For example, if the object key name is photos/Jan/sample.jpg,
    the forward slash in the key name is not encoded.
See http://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-header-based-auth.html
*/

// converts a utf8 character to hex and pads '%' before every two hex digits
function _toHexUTF8(char: string) {
    const hexRep = Buffer.from(char, 'utf8').toString('hex').toUpperCase();
    let res = '';
    hexRep.split('').forEach((v, n) => {
        // pad % before every 2 hex digits
        if (n % 2 === 0) {
            res += '%';
        }
        res += v;
    });
    return res;
}

export default function awsURIencode(
    input: string,
    encodeSlash?: boolean,
    noEncodeStar?: boolean
) {
    /**
     * Duplicate query params are not supported by AWS S3 APIs. These params
     * are parsed as Arrays by the Node.js HTTP parser, which breaks this method.
     */
    if (typeof input !== 'string') {
        return '';
    }
    let encoded = '';
    const slash = encodeSlash === undefined || encodeSlash ? '%2F' : '/';
    const star = noEncodeStar !== undefined && noEncodeStar ? '*' : '%2A';
    for (let i = 0; i < input.length; i++) {
        let ch = input.charAt(i);
        if ((ch >= 'A' && ch <= 'Z') ||
            (ch >= 'a' && ch <= 'z') ||
            (ch >= '0' && ch <= '9') ||
            ch === '_' || ch === '-' ||
            ch === '~' || ch === '.') {
            encoded = encoded.concat(ch);
        } else if (ch === ' ') {
            encoded = encoded.concat('%20');
        } else if (ch === '/') {
            encoded = encoded.concat(slash);
        } else if (ch === '*') {
            encoded = encoded.concat(star);
        } else {
            if (ch >= '\uD800' && ch <= '\uDBFF') {
                // If this character is a high surrogate, peek at the next
                // character and join it with this one if the next character
                // is a low surrogate.
                // Otherwise the encoded URI will contain the two surrogates
                // as two distinct UTF-8 sequences, which is not valid UTF-8.
                if (i + 1 < input.length) {
                    const ch2 = input.charAt(i + 1);
                    if (ch2 >= '\uDC00' && ch2 <= '\uDFFF') {
                        i++;
                        ch += ch2;
                    }
                }
            }
            encoded = encoded.concat(_toHexUTF8(ch));
        }
    }
    return encoded;
}
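A few illustrative encodings, runnable against the function above:

awsURIencode('photos/Jan/sample.jpg', false); // => 'photos/Jan/sample.jpg' (slashes kept in key names)
awsURIencode('a b*c');                        // => 'a%20b%2Ac' ('/' would become '%2F' here)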
@ -1,60 +0,0 @@
import * as crypto from 'crypto';
import { Logger } from 'werelogs';
import createCanonicalRequest from './createCanonicalRequest';

/**
 * constructStringToSign - creates V4 stringToSign
 * @param {object} params - params object
 * @returns {string} - stringToSign
 */
export default function constructStringToSign(params: {
    request: any;
    signedHeaders: any;
    payloadChecksum: any;
    credentialScope: string;
    timestamp: string;
    query: { [key: string]: string };
    log?: Logger;
    proxyPath?: string;
    awsService: string;
}): string | Error {
    const {
        request,
        signedHeaders,
        payloadChecksum,
        credentialScope,
        timestamp,
        query,
        log,
        proxyPath,
    } = params;
    const path = proxyPath || request.path;

    const canonicalReqResult = createCanonicalRequest({
        pHttpVerb: request.method,
        pResource: path,
        pQuery: query,
        pHeaders: request.headers,
        pSignedHeaders: signedHeaders,
        payloadChecksum,
        service: params.awsService,
    });

    // TODO Why that line?
    // @ts-ignore
    if (canonicalReqResult instanceof Error) {
        if (log) {
            log.error('error creating canonicalRequest');
        }
        return canonicalReqResult;
    }
    if (log) {
        log.debug('constructed canonicalRequest', { canonicalReqResult });
    }
    const sha256 = crypto.createHash('sha256');
    const canonicalHex = sha256.update(canonicalReqResult, 'binary')
        .digest('hex');
    const stringToSign = `AWS4-HMAC-SHA256\n${timestamp}\n` +
        `${credentialScope}\n${canonicalHex}`;
    return stringToSign;
}
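A minimal sketch of the final assembly step, assuming a hypothetical canonical request, timestamp, and credential scope (the real values come from createCanonicalRequest and the credential header):

import * as crypto from 'crypto';

const canonicalRequest = 'GET\n/\n\nhost:s3.example.com\n\nhost\n' +
    'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855';
const canonicalHex = crypto.createHash('sha256')
    .update(canonicalRequest, 'binary').digest('hex');
const stringToSign = 'AWS4-HMAC-SHA256\n20160202T220410Z\n' +
    `20160202/us-east-1/s3/aws4_request\n${canonicalHex}`;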
@ -1,95 +0,0 @@
import * as crypto from 'crypto';
import * as queryString from 'querystring';
import awsURIencode from './awsURIencode';

/**
 * createCanonicalRequest - creates V4 canonical request
 * @param params - contains pHttpVerb (request type),
 * pResource (parsed from URL), pQuery (request query),
 * pHeaders (request headers), pSignedHeaders (signed headers from request),
 * payloadChecksum (from request)
 * @returns - canonicalRequest
 */
export default function createCanonicalRequest(
    params: {
        pHttpVerb: string;
        pResource: string;
        pQuery: { [key: string]: string };
        pHeaders: any;
        pSignedHeaders: any;
        service: string;
        payloadChecksum: string;
    }
) {
    const pHttpVerb = params.pHttpVerb;
    const pResource = params.pResource;
    const pQuery = params.pQuery;
    const pHeaders = params.pHeaders;
    const pSignedHeaders = params.pSignedHeaders;
    const service = params.service;
    let payloadChecksum = params.payloadChecksum;
    if (!payloadChecksum) {
        if (pHttpVerb === 'GET') {
            // SHA-256 hash of the empty string
            payloadChecksum = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b' +
                '934ca495991b7852b855';
        } else if (pHttpVerb === 'POST') {
            let notEncodeStar = false;
            // The Java SDK does not encode the '*' parameter when computing
            // the signature, so if that user-agent is recognized, we need to
            // keep the plain '*' as well.
            if (/aws-sdk-java\/[0-9.]+/.test(pHeaders['user-agent'])) {
                notEncodeStar = true;
            }
            let payload = queryString.stringify(pQuery, undefined, undefined, {
                encodeURIComponent: input => awsURIencode(input, true,
                    notEncodeStar),
            });
            payload = payload.replace(/%20/g, '+');
            payloadChecksum = crypto.createHash('sha256')
                .update(payload, 'binary').digest('hex').toLowerCase();
        }
    }

    const canonicalURI = pResource ? awsURIencode(pResource, false) : '/';

    // canonical query string
    let canonicalQueryStr = '';
    if (pQuery && !((service === 'iam' || service === 'ring' ||
        service === 'sts') &&
        pHttpVerb === 'POST')) {
        const sortedQueryParams = Object.keys(pQuery).sort().map(key => {
            const encodedKey = awsURIencode(key);
            const value = pQuery[key] ? awsURIencode(pQuery[key]) : '';
            return `${encodedKey}=${value}`;
        });
        canonicalQueryStr = sortedQueryParams.join('&');
    }

    // signed headers
    const signedHeadersList = pSignedHeaders.split(';');
    signedHeadersList.sort((a: any, b: any) => a.localeCompare(b));
    const signedHeaders = signedHeadersList.join(';');

    // canonical headers
    const canonicalHeadersList = signedHeadersList.map((signedHeader: any) => {
        if (pHeaders[signedHeader] !== undefined) {
            const trimmedHeader = pHeaders[signedHeader]
                .trim().replace(/\s+/g, ' ');
            return `${signedHeader}:${trimmedHeader}\n`;
        }
        // nginx will strip the actual expect header, so add the value of the
        // header back here if it was included as a signed header
        if (signedHeader === 'expect') {
            return `${signedHeader}:100-continue\n`;
        }
        // handle the case where a signed 'header' is actually a query param
        return `${signedHeader}:${pQuery[signedHeader]}\n`;
    });

    const canonicalHeaders = canonicalHeadersList.join('');

    const canonicalRequest = `${pHttpVerb}\n${canonicalURI}\n` +
        `${canonicalQueryStr}\n${canonicalHeaders}\n` +
        `${signedHeaders}\n${payloadChecksum}`;
    return canonicalRequest;
}
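The six lines of the canonical request, shown for a hypothetical GET of '/' with only the host header signed and an empty body:

const canonicalRequest = [
    'GET',                   // pHttpVerb
    '/',                     // canonicalURI
    '',                      // canonicalQueryStr (no query params)
    'host:s3.example.com\n', // canonicalHeaders (each entry ends with \n)
    'host',                  // signedHeaders
    'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', // payloadChecksum
].join('\n');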
@ -1,185 +0,0 @@
import { Logger } from 'werelogs';
import errors from '../../../lib/errors';
import * as constants from '../../constants';
import constructStringToSign from './constructStringToSign';
import {
    checkTimeSkew,
    convertUTCtoISO8601,
    convertAmzTimeToMs,
} from './timeUtils';
import {
    extractAuthItems,
    validateCredentials,
    areSignedHeadersComplete,
} from './validateInputs';

/**
 * V4 header auth check
 * @param request - HTTP request object
 * @param log - logging object
 * @param data - Parameters from queryString parsing or body of
 * POST request
 * @param awsService - AWS service ('iam' or 's3')
 */
export function check(
    request: any,
    log: Logger,
    data: { [key: string]: string },
    awsService: string
) {
    log.trace('running header auth check');

    const token = request.headers['x-amz-security-token'];
    if (token && !constants.iamSecurityToken.pattern.test(token)) {
        log.debug('invalid security token', { token });
        return { err: errors.InvalidToken };
    }

    // authorization header
    const authHeader = request.headers.authorization;
    if (!authHeader) {
        log.debug('missing authorization header');
        return { err: errors.MissingSecurityHeader };
    }

    const authHeaderItems = extractAuthItems(authHeader, log);
    if (Object.keys(authHeaderItems).length < 3) {
        log.debug('invalid authorization header', { authHeader });
        return { err: errors.InvalidArgument };
    }

    const payloadChecksum = request.headers['x-amz-content-sha256'];
    if (!payloadChecksum && awsService !== 'iam') {
        log.debug('missing payload checksum');
        return { err: errors.MissingSecurityHeader };
    }
    if (payloadChecksum === 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD') {
        log.trace('requesting streaming v4 auth');
        if (request.method !== 'PUT') {
            log.debug('streaming v4 auth for put only',
                { method: 'auth/v4/headerAuthCheck.check' });
            return { err: errors.InvalidArgument };
        }
        if (!request.headers['x-amz-decoded-content-length']) {
            return { err: errors.MissingSecurityHeader };
        }
    }

    log.trace('authorization header from request', { authHeader });

    const signatureFromRequest = authHeaderItems.signatureFromRequest!;
    const credentialsArr = authHeaderItems.credentialsArr!;
    const signedHeaders = authHeaderItems.signedHeaders!;

    if (!areSignedHeadersComplete(signedHeaders, request.headers)) {
        log.debug('signedHeaders are incomplete', { signedHeaders });
        return { err: errors.AccessDenied };
    }

    let timestamp: string | undefined;
    // check request timestamp
    const xAmzDate = request.headers['x-amz-date'];
    if (xAmzDate) {
        const xAmzDateArr = xAmzDate.split('T');
        // check that x-amz-date has the correct format and is after the epoch
        if (xAmzDateArr.length === 2 && xAmzDateArr[0].length === 8
            && xAmzDateArr[1].length === 7
            && Number.parseInt(xAmzDateArr[0], 10) > 19700101) {
            // format of x-amz-date is ISO 8601: YYYYMMDDTHHMMSSZ
            timestamp = request.headers['x-amz-date'];
        }
    } else if (request.headers.date) {
        timestamp = convertUTCtoISO8601(request.headers.date);
    }
    if (!timestamp) {
        log.debug('missing or invalid date header',
            { method: 'auth/v4/headerAuthCheck.check' });
        return { err: errors.AccessDenied.
            customizeDescription('Authentication requires a valid Date or ' +
            'x-amz-date header') };
    }

    const validationResult = validateCredentials(credentialsArr, timestamp,
        log);
    if (validationResult instanceof Error) {
        log.debug('credentials in improper format', { credentialsArr,
            timestamp, validationResult });
        return { err: validationResult };
    }
    // credentialsArr is [accessKey, date, region, aws-service, aws4_request]
    const scopeDate = credentialsArr[1];
    const region = credentialsArr[2];
    const service = credentialsArr[3];
    const accessKey = credentialsArr.shift();
    const credentialScope = credentialsArr.join('/');

    // In AWS Signature Version 4, the signing key is valid for up to
    // seven days (see Introduction to Signing Requests).
    // Therefore, a signature is also valid for up to seven days, or
    // less if specified by a bucket policy.
    // Since policies are not yet implemented, we use a 15-minute
    // default as in v2 auth.
    // See http://docs.aws.amazon.com/AmazonS3/latest/API/
    // bucket-policy-s3-sigv4-conditions.html
    // TODO: When implementing bucket policies,
    // note that expiration can be shortened so that
    // expiry is as set out in the policy.

    // 15 minutes in seconds
    const expiry = (15 * 60);
    const isTimeSkewed = checkTimeSkew(timestamp, expiry, log);
    if (isTimeSkewed) {
        return { err: errors.RequestTimeTooSkewed };
    }

    let proxyPath: string | undefined;
    if (request.headers.proxy_path) {
        try {
            proxyPath = decodeURIComponent(request.headers.proxy_path);
        } catch (err) {
            log.debug('invalid proxy_path header', { proxyPath, err });
            return { err: errors.InvalidArgument.customizeDescription(
                'invalid proxy_path header') };
        }
    }

    const stringToSign = constructStringToSign({
        log,
        request,
        query: data,
        signedHeaders,
        credentialScope,
        timestamp,
        payloadChecksum,
        awsService: service,
        proxyPath,
    });
    log.trace('constructed stringToSign', { stringToSign });
    if (stringToSign instanceof Error) {
        return { err: stringToSign };
    }

    return {
        err: null,
        params: {
            version: 4,
            data: {
                accessKey,
                signatureFromRequest,
                region,
                service,
                scopeDate,
                stringToSign,
                authType: 'REST-HEADER',
                signatureVersion: 'AWS4-HMAC-SHA256',
                signatureAge: Date.now() - convertAmzTimeToMs(timestamp),
                // credentialScope and timestamp are needed for streaming V4
                // chunk evaluation
                credentialScope,
                timestamp,
                securityToken: token,
            },
        },
    };
}
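For reference, the shift/join above turns the credential portion of the header into the scope string; with hypothetical values:

const credentialsArr = ['AKIDEXAMPLE', '20160202', 'us-east-1', 's3', 'aws4_request'];
const accessKey = credentialsArr.shift();         // 'AKIDEXAMPLE'
const credentialScope = credentialsArr.join('/'); // '20160202/us-east-1/s3/aws4_request'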
@ -1,118 +0,0 @@
import { Logger } from 'werelogs';
import * as constants from '../../constants';
import errors from '../../errors';
import constructStringToSign from './constructStringToSign';
import { checkTimeSkew, convertAmzTimeToMs } from './timeUtils';
import {
    validateCredentials,
    extractQueryParams,
    areSignedHeadersComplete,
} from './validateInputs';

/**
 * V4 query auth check
 * @param request - HTTP request object
 * @param log - logging object
 * @param data - Contains authentication params (GET or POST data)
 */
export function check(request: any, log: Logger, data: { [key: string]: string }) {
    const authParams = extractQueryParams(data, log);

    if (Object.keys(authParams).length !== 5) {
        return { err: errors.InvalidArgument };
    }

    // Query params are not specified as case-insensitive in the AWS
    // documentation, so we treat them as case-sensitive.
    const token = data['X-Amz-Security-Token'];
    if (token && !constants.iamSecurityToken.pattern.test(token)) {
        log.debug('invalid security token', { token });
        return { err: errors.InvalidToken };
    }

    const signedHeaders = authParams.signedHeaders!;
    const signatureFromRequest = authParams.signatureFromRequest!;
    const timestamp = authParams.timestamp!;
    const expiry = authParams.expiry!;
    const credential = authParams.credential!;

    if (!areSignedHeadersComplete(signedHeaders, request.headers)) {
        log.debug('signedHeaders are incomplete', { signedHeaders });
        return { err: errors.AccessDenied };
    }

    const validationResult = validateCredentials(credential, timestamp,
        log);
    if (validationResult instanceof Error) {
        log.debug('credentials in improper format', { credential,
            timestamp, validationResult });
        return { err: validationResult };
    }
    const accessKey = credential[0];
    const scopeDate = credential[1];
    const region = credential[2];
    const service = credential[3];
    const requestType = credential[4];

    const isTimeSkewed = checkTimeSkew(timestamp, expiry, log);
    if (isTimeSkewed) {
        return { err: errors.RequestTimeTooSkewed };
    }

    let proxyPath: string | undefined;
    if (request.headers.proxy_path) {
        try {
            proxyPath = decodeURIComponent(request.headers.proxy_path);
        } catch (err) {
            log.debug('invalid proxy_path header', { proxyPath });
            return { err: errors.InvalidArgument.customizeDescription(
                'invalid proxy_path header') };
        }
    }

    // In query v4 auth, the canonical request needs to include the query
    // params OTHER THAN the signature, so create a copy of the query object
    // and remove the X-Amz-Signature property.
    const queryWithoutSignature = Object.assign({}, data);
    delete queryWithoutSignature['X-Amz-Signature'];

    // For query auth, instead of a checksum of the contents, the string
    // 'UNSIGNED-PAYLOAD' should be added to the canonicalRequest when
    // building the string to sign.
    const payloadChecksum = 'UNSIGNED-PAYLOAD';

    const stringToSign = constructStringToSign({
        log,
        request,
        query: queryWithoutSignature,
        signedHeaders,
        payloadChecksum,
        timestamp,
        credentialScope:
            `${scopeDate}/${region}/${service}/${requestType}`,
        awsService: service,
        proxyPath,
    });
    if (stringToSign instanceof Error) {
        return { err: stringToSign };
    }
    log.trace('constructed stringToSign', { stringToSign });
    return {
        err: null,
        params: {
            version: 4,
            data: {
                accessKey,
                signatureFromRequest,
                region,
                scopeDate,
                stringToSign,
                authType: 'REST-QUERY-STRING',
                signatureVersion: 'AWS4-HMAC-SHA256',
                signatureAge: Date.now() - convertAmzTimeToMs(timestamp),
                securityToken: token,
            },
        },
    };
}
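A sketch of the presigned-URL query parameters this check consumes (values are hypothetical; the signature must be 64 hex characters):

const data = {
    'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
    'X-Amz-Credential': 'AKIDEXAMPLE/20160202/us-east-1/s3/aws4_request',
    'X-Amz-Date': '20160202T220410Z',   // 16 chars, compact ISO 8601
    'X-Amz-Expires': '86400',           // seconds, at most 604800 (7 days)
    'X-Amz-SignedHeaders': 'host',
    'X-Amz-Signature': '0'.repeat(64),  // placeholder for the hex HMAC
};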
@ -1,316 +0,0 @@
import { Transform } from 'stream';
import async from 'async';
import errors from '../../../errors';
import { Logger } from 'werelogs';
import Vault, { AuthV4RequestParams } from '../../Vault';
import { Callback } from '../../backends/in_memory/types';

import constructChunkStringToSign from './constructChunkStringToSign';

export type TransformParams = {
    accessKey: string;
    signatureFromRequest: string;
    region: string;
    scopeDate: string;
    timestamp: string;
    credentialScope: string;
};

/**
 * This class is designed to handle the chunks sent in a streaming
 * v4 Auth request
 */
export default class V4Transform extends Transform {
    log: Logger;
    cb: Callback;
    accessKey: string;
    region: string;
    scopeDate: string;
    timestamp: string;
    credentialScope: string;
    lastSignature: string;
    currentSignature?: string;
    haveMetadata: boolean;
    seekingDataSize: number;
    currentData?: any;
    dataCursor: number;
    currentMetadata: any[];
    lastPieceDone: boolean;
    lastChunk: boolean;
    vault: Vault;

    /**
     * @constructor
     * @param streamingV4Params - info for chunk authentication
     * @param streamingV4Params.accessKey - requester's accessKey
     * @param streamingV4Params.signatureFromRequest - signature
     * sent with headers
     * @param streamingV4Params.region - region sent with auth header
     * @param streamingV4Params.scopeDate - date sent with auth header
     * @param streamingV4Params.timestamp - date parsed from headers
     * in ISO 8601 format: YYYYMMDDTHHMMSSZ
     * @param streamingV4Params.credentialScope - items from auth
     * header plus the string 'aws4_request' joined with '/':
     * timestamp/region/aws-service/aws4_request
     * @param vault - Vault instance passed from CloudServer
     * @param log - logger object
     * @param cb - callback to api
     */
    constructor(
        streamingV4Params: TransformParams,
        vault: Vault,
        log: Logger,
        cb: Callback,
    ) {
        const { accessKey, signatureFromRequest, region, scopeDate, timestamp,
            credentialScope } = streamingV4Params;
        super({});
        this.log = log;
        this.cb = cb;
        this.accessKey = accessKey;
        this.region = region;
        this.scopeDate = scopeDate;
        this.timestamp = timestamp;
        this.credentialScope = credentialScope;
        this.lastSignature = signatureFromRequest;
        this.currentSignature = undefined;
        this.haveMetadata = false;
        // keep this as -1 to start, since a seekingDataSize of 0
        // means that the chunk is just metadata (as is the case with the
        // last chunk)
        this.seekingDataSize = -1;
        this.currentData = undefined;
        this.dataCursor = 0;
        this.currentMetadata = [];
        this.lastPieceDone = false;
        this.lastChunk = false;
        this.vault = vault;
    }

    /**
     * This function will parse the metadata portion of the chunk
     * @param remainingChunk - chunk sent from _transform
     * @return response - if error, will return 'err' key with
     * arsenal error value.
     * if incomplete metadata, will return 'completeMetadata' key with
     * value false
     * if complete metadata received, will return 'completeMetadata' key with
     * value true and the key 'unparsedChunk' with the remaining chunk without
     * the parsed metadata piece
     */
    _parseMetadata(remainingChunk: Buffer) {
        let remainingPlusStoredMetadata = remainingChunk;
        // if we have stored metadata pieces, add them to the front of
        // remainingChunk
        if (this.currentMetadata.length > 0) {
            this.currentMetadata.push(remainingChunk);
            remainingPlusStoredMetadata = Buffer.concat(this.currentMetadata);
            // zero out stored metadata
            this.currentMetadata.length = 0;
        }
        let lineBreakIndex = remainingPlusStoredMetadata.indexOf('\r\n');
        if (lineBreakIndex < 0) {
            this.currentMetadata.push(remainingPlusStoredMetadata);
            return { completeMetadata: false };
        }
        let fullMetadata = remainingPlusStoredMetadata.slice(0,
            lineBreakIndex);

        // handle extra line break at the end of the previous data chunk
        if (fullMetadata.length === 0) {
            const chunkWithoutLeadingLineBreak = remainingPlusStoredMetadata
                .slice(2);
            // find the second line break
            lineBreakIndex = chunkWithoutLeadingLineBreak.indexOf('\r\n');
            if (lineBreakIndex < 0) {
                this.currentMetadata.push(chunkWithoutLeadingLineBreak);
                return { completeMetadata: false };
            }
            fullMetadata = chunkWithoutLeadingLineBreak.slice(0,
                lineBreakIndex);
        }

        const splitMeta = fullMetadata.toString().split(';');
        this.log.trace('parsed full metadata for chunk', { splitMeta });
        if (splitMeta.length !== 2) {
            this.log.trace('chunk body did not contain correct ' +
                'metadata format');
            return { err: errors.InvalidArgument };
        }
        // chunk-size is sent in hex
        const dataSize = Number.parseInt(splitMeta[0], 16);
        if (Number.isNaN(dataSize)) {
            this.log.trace('chunk body did not contain valid size');
            return { err: errors.InvalidArgument };
        }
        let chunkSig = splitMeta[1];
        if (!chunkSig || chunkSig.indexOf('chunk-signature=') < 0) {
            this.log.trace('chunk body did not contain correct sig format');
            return { err: errors.InvalidArgument };
        }
        chunkSig = chunkSig.replace('chunk-signature=', '');
        this.currentSignature = chunkSig;
        this.haveMetadata = true;
        if (dataSize === 0) {
            this.lastChunk = true;
            return {
                completeMetadata: true,
            };
        }
        // + 2 to get the \r\n at the end
        this.seekingDataSize = dataSize + 2;
        this.currentData = Buffer.alloc(dataSize);

        return {
            completeMetadata: true,
            // start the slice at lineBreakIndex plus 2 to remove the line
            // break at the end of the metadata piece, since '\r\n' is 2 bytes
            unparsedChunk: remainingPlusStoredMetadata
                .slice(lineBreakIndex + 2),
        };
    }

    /**
     * Build the stringToSign and authenticate the chunk
     * @param dataToSend - chunk sent from _transform or null
     * if last chunk without data
     * @param done - callback to _transform
     * @return executes callback with err if applicable
     */
    _authenticate(dataToSend: Buffer | null, done: Callback) {
        // use the prior signature to construct the new string to sign
        const stringToSign = constructChunkStringToSign(this.timestamp,
            this.credentialScope, this.lastSignature, dataToSend ?? undefined);
        this.log.trace('constructed chunk string to sign',
            { stringToSign });
        // now that the prior signature has been used to construct the string
        // to sign, reassign lastSignature to the current signature
        this.lastSignature = this.currentSignature!;
        const vaultParams: AuthV4RequestParams = {
            log: this.log,
            data: {
                accessKey: this.accessKey,
                signatureFromRequest: this.currentSignature!,
                region: this.region,
                scopeDate: this.scopeDate,
                stringToSign,
                // TODO FIXME This cannot work
                // @ts-expect-error
                timestamp: this.timestamp,
                credentialScope: this.credentialScope,
            },
        };
        return this.vault.authenticateV4Request(vaultParams, null, err => {
            if (err) {
                this.log.trace('err from vault on streaming v4 auth',
                    { error: err, paramsSentToVault: vaultParams.data });
                return done(err);
            }
            return done();
        });
    }

    /**
     * This function will parse the chunk into metadata and data,
     * use the metadata to authenticate with vault and send the
     * data on to be stored if authentication passes
     *
     * @param chunk - chunk from request body
     * @param _encoding - data encoding (unused)
     * @param callback - Callback(err, justDataChunk, encoding)
     * @return executes callback with err if applicable
     */
    _transform(chunk: Buffer, _encoding: string, callback: Callback) {
        // 'chunk' here is the node streaming chunk.
        // transfer-encoding chunks should be of the format:
        // string(IntHexBase(chunk-size)) + ";chunk-signature=" +
        // signature + \r\n + chunk-data + \r\n
        // The last transfer-encoding chunk will have size 0 and no chunk-data.

        if (this.lastPieceDone) {
            const slice = chunk.slice(0, 10);
            this.log.trace('received chunk after end. ' +
                'See first 10 bytes of chunk',
                { chunk: slice.toString() });
            return callback();
        }
        let unparsedChunk = chunk;
        let chunkLeftToEvaluate = true;
        return async.whilst(
            // test function
            () => chunkLeftToEvaluate,
            // async function
            done => {
                if (!this.haveMetadata) {
                    this.log.trace('do not have metadata so calling ' +
                        '_parseMetadata');
                    // need to parse our metadata
                    const parsedMetadataResults =
                        this._parseMetadata(unparsedChunk);
                    if (parsedMetadataResults.err) {
                        return done(parsedMetadataResults.err);
                    }
                    // if we do not have the full metadata, get the next chunk
                    if (!parsedMetadataResults.completeMetadata) {
                        chunkLeftToEvaluate = false;
                        return done();
                    }
                    // we have the metadata, so reset unparsedChunk to the
                    // remainder without the metadata piece
                    // TODO Is that okay?
                    // @ts-expect-error
                    unparsedChunk = parsedMetadataResults.unparsedChunk;
                }
                if (this.lastChunk) {
                    this.log.trace('authenticating final chunk with no data');
                    return this._authenticate(null, err => {
                        if (err) {
                            return done(err);
                        }
                        chunkLeftToEvaluate = false;
                        this.lastPieceDone = true;
                        return done();
                    });
                }
                if (unparsedChunk.length < this.seekingDataSize) {
                    // add the chunk to currentData and get the next chunk
                    unparsedChunk.copy(this.currentData, this.dataCursor);
                    this.dataCursor += unparsedChunk.length;
                    this.seekingDataSize -= unparsedChunk.length;
                    chunkLeftToEvaluate = false;
                    return done();
                }
                // parse just the next data piece without the \r\n at the end
                // (therefore, minus 2)
                const nextDataPiece =
                    unparsedChunk.slice(0, this.seekingDataSize - 2);
                // add the parsed data piece to the other currentData pieces
                // so that this.currentData is the full data piece
                nextDataPiece.copy(this.currentData, this.dataCursor);
                return this._authenticate(this.currentData, err => {
                    if (err) {
                        return done(err);
                    }
                    unparsedChunk =
                        unparsedChunk.slice(this.seekingDataSize);
                    this.push(this.currentData);
                    this.haveMetadata = false;
                    this.seekingDataSize = -1;
                    this.currentData = undefined;
                    this.dataCursor = 0;
                    chunkLeftToEvaluate = unparsedChunk.length > 0;
                    return done();
                });
            },
            // final callback
            err => {
                if (err) {
                    return this.cb(err as any);
                }
                // get next chunk
                return callback();
            },
        );
    }
}
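To make the parsing above concrete, this is the wire format of a hypothetical streaming-v4 body with one 1024-byte data chunk followed by the final empty chunk (the signatures are placeholders):

const sig = '0'.repeat(64); // placeholder chunk signature
const body = Buffer.concat([
    Buffer.from(`400;chunk-signature=${sig}\r\n`),   // 0x400 = 1024 data bytes
    Buffer.alloc(1024, 'a'),                         // chunk-data
    Buffer.from('\r\n'),
    Buffer.from(`0;chunk-signature=${sig}\r\n\r\n`), // final 0-size chunk
]);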
@ -1,35 +0,0 @@
import * as crypto from 'crypto';
import * as constants from '../../../constants';

/**
 * Constructs stringToSign for chunk
 * @param timestamp - date parsed from headers
 * in ISO 8601 format: YYYYMMDDTHHMMSSZ
 * @param credentialScope - items from auth
 * header plus the string 'aws4_request' joined with '/':
 * timestamp/region/aws-service/aws4_request
 * @param lastSignature - signature from headers or prior chunk
 * @param justDataChunk - data portion of chunk
 * @returns stringToSign
 */
export default function constructChunkStringToSign(
    timestamp: string,
    credentialScope: string,
    lastSignature: string,
    justDataChunk?: Buffer | string,
) {
    let currentChunkHash: string;
    // for the last chunk, there will be no data, so use emptyStringHash
    if (!justDataChunk) {
        currentChunkHash = constants.emptyStringHash;
    } else {
        const hash = crypto.createHash('sha256');
        const temp = justDataChunk instanceof Buffer
            ? hash.update(justDataChunk)
            : hash.update(justDataChunk, 'binary');
        currentChunkHash = temp.digest('hex');
    }
    return `AWS4-HMAC-SHA256-PAYLOAD\n${timestamp}\n` +
        `${credentialScope}\n${lastSignature}\n` +
        `${constants.emptyStringHash}\n${currentChunkHash}`;
}
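A sketch of a call, with hypothetical timestamp, scope, and prior-signature values (the final, empty chunk simply omits justDataChunk):

const chunkStringToSign = constructChunkStringToSign(
    '20160202T220410Z',
    '20160202/us-east-1/s3/aws4_request',
    '0'.repeat(64),            // signature of the previous chunk (or the header seed)
    Buffer.from('chunk data'),
);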
@ -1,56 +0,0 @@
import { Logger } from 'werelogs';

/**
 * Convert timestamp to milliseconds since Unix Epoch
 * @param timestamp of ISO8601Timestamp format without
 * dashes or colons, e.g. 20160202T220410Z
 */
export function convertAmzTimeToMs(timestamp: string) {
    const arr = timestamp.split('');
    // Convert to YYYY-MM-DDTHH:mm:ss.sssZ
    const ISO8601time = `${arr.slice(0, 4).join('')}-${arr[4]}${arr[5]}` +
        `-${arr.slice(6, 11).join('')}:${arr[11]}${arr[12]}:${arr[13]}` +
        `${arr[14]}.000Z`;
    return Date.parse(ISO8601time);
}

/**
 * Convert UTC timestamp to ISO 8601 timestamp
 * @param timestamp of UTC form: Fri, 10 Feb 2012 21:34:55 GMT
 * @return ISO8601 timestamp of form: YYYYMMDDTHHMMSSZ
 */
export function convertUTCtoISO8601(timestamp: string | number) {
    // convert to ISO string: YYYY-MM-DDTHH:mm:ss.sssZ.
    const converted = new Date(timestamp).toISOString();
    // Remove the "-"s and ":"s and the milliseconds
    return converted.split('.')[0].replace(/-|:/g, '').concat('Z');
}

/**
 * Check whether the timestamp predates the request or is too old
 * @param timestamp of ISO8601Timestamp format without
 * dashes or colons, e.g. 20160202T220410Z
 * @param expiry - number of seconds the signature should be valid
 * @param log - log for request
 * @return true if there is a time problem
 */
export function checkTimeSkew(timestamp: string, expiry: number, log: Logger) {
    const currentTime = Date.now();
    const fifteenMinutes = (15 * 60 * 1000);
    const parsedTimestamp = convertAmzTimeToMs(timestamp);
    if ((currentTime + fifteenMinutes) < parsedTimestamp) {
        log.debug('current time pre-dates timestamp', {
            parsedTimestamp,
            currentTimeInMilliseconds: currentTime });
        return true;
    }
    const expiryInMilliseconds = expiry * 1000;
    if (currentTime > parsedTimestamp + expiryInMilliseconds) {
        log.debug('signature has expired', {
            parsedTimestamp,
            expiry,
            currentTimeInMilliseconds: currentTime });
        return true;
    }
    return false;
}
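Two illustrative conversions, runnable against the helpers above:

convertAmzTimeToMs('20160202T220410Z');               // => 1454450650000
convertUTCtoISO8601('Fri, 10 Feb 2012 21:34:55 GMT'); // => '20120210T213455Z'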
@ -1,206 +0,0 @@
|
||||||
import { Logger } from 'werelogs';
|
|
||||||
import errors from '../../../lib/errors';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Validate Credentials
|
|
||||||
* @param credentials - contains accessKey, scopeDate,
|
|
||||||
* region, service, requestType
|
|
||||||
* @param timestamp - timestamp from request in
|
|
||||||
* the format of ISO 8601: YYYYMMDDTHHMMSSZ
|
|
||||||
* @param log - logging object
|
|
||||||
*/
|
|
||||||
export function validateCredentials(
|
|
||||||
credentials: [string, string, string, string, string],
|
|
||||||
timestamp: string,
|
|
||||||
log: Logger
|
|
||||||
): Error | {} {
|
|
||||||
if (!Array.isArray(credentials) || credentials.length !== 5) {
|
|
||||||
log.warn('credentials in improper format', { credentials });
|
|
||||||
return errors.InvalidArgument;
|
|
||||||
}
|
|
||||||
// credentials[2] (region) is not read intentionally
|
|
||||||
const accessKey = credentials[0];
|
|
||||||
const scopeDate = credentials[1];
|
|
||||||
const service = credentials[3];
|
|
||||||
const requestType = credentials[4];
|
|
||||||
if (accessKey.length < 1) {
|
|
||||||
log.warn('accessKey provided is wrong format', { accessKey });
|
|
||||||
return errors.InvalidArgument;
|
|
||||||
}
|
|
||||||
// The scope date (format YYYYMMDD) must be same date as the timestamp
|
|
||||||
// on the request from the x-amz-date param (if queryAuthCheck)
|
|
||||||
// or from the x-amz-date header or date header (if headerAuthCheck)
|
|
||||||
// Format of timestamp is ISO 8601: YYYYMMDDTHHMMSSZ.
|
|
||||||
// http://docs.aws.amazon.com/AmazonS3/latest/API/
|
|
||||||
// sigv4-query-string-auth.html
|
|
||||||
// http://docs.aws.amazon.com/general/latest/gr/
|
|
||||||
// sigv4-date-handling.html
|
|
||||||
|
|
||||||
// convert timestamp to format of scopeDate YYYYMMDD
|
|
||||||
const timestampDate = timestamp.split('T')[0];
|
|
||||||
if (scopeDate.length !== 8 || scopeDate !== timestampDate) {
|
|
||||||
log.warn('scope date must be the same date as the timestamp date',
|
|
||||||
{ scopeDate, timestampDate });
|
|
||||||
return errors.RequestTimeTooSkewed;
|
|
||||||
}
|
|
||||||
if (service !== 's3' && service !== 'iam' && service !== 'ring' &&
|
|
||||||
service !== 'sts') {
|
|
||||||
log.warn('service in credentials is not one of s3/iam/ring/sts', {
|
|
||||||
service,
|
|
||||||
});
|
|
||||||
return errors.InvalidArgument;
|
|
||||||
}
|
|
||||||
if (requestType !== 'aws4_request') {
|
|
||||||
log.warn('requestType contained in params is not aws4_request',
|
|
||||||
{ requestType });
|
|
||||||
return errors.InvalidArgument;
|
|
||||||
}
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Extract and validate components from query object
|
|
||||||
* @param queryObj - query object from request
|
|
||||||
* @param log - logging object
|
|
||||||
* @return object containing extracted query params for authV4
|
|
||||||
*/
|
|
||||||
export function extractQueryParams(
|
|
||||||
queryObj: { [key: string]: string | undefined },
|
|
||||||
log: Logger
|
|
||||||
) {
|
|
||||||
const authParams: {
|
|
||||||
signedHeaders?: string;
|
|
||||||
signatureFromRequest?: string;
|
|
||||||
timestamp?: string;
|
|
||||||
expiry?: number;
|
|
||||||
credential?: [string, string, string, string, string];
|
|
||||||
} = {};
|
|
||||||
|
|
||||||
// Do not need the algorithm sent back
|
|
||||||
if (queryObj['X-Amz-Algorithm'] !== 'AWS4-HMAC-SHA256') {
|
|
||||||
log.warn('algorithm param incorrect',
|
|
||||||
{ algo: queryObj['X-Amz-Algorithm'] });
|
|
||||||
return authParams;
|
|
||||||
}
|
|
||||||
|
|
||||||
const signedHeaders = queryObj['X-Amz-SignedHeaders'];
|
|
||||||
// At least "host" must be included in signed headers
|
|
||||||
if (signedHeaders && signedHeaders.length > 3) {
|
|
||||||
authParams.signedHeaders = signedHeaders;
|
|
||||||
} else {
|
|
||||||
log.warn('missing signedHeaders');
|
|
||||||
return authParams;
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
const signature = queryObj['X-Amz-Signature'];
|
|
||||||
if (signature && signature.length === 64) {
|
|
||||||
authParams.signatureFromRequest = signature;
|
|
||||||
} else {
|
|
||||||
log.warn('missing signature');
|
|
||||||
return authParams;
|
|
||||||
}
|
|
||||||
|
|
||||||
const timestamp = queryObj['X-Amz-Date'];
|
|
||||||
if (timestamp && timestamp.length === 16) {
|
|
||||||
authParams.timestamp = timestamp;
|
|
||||||
} else {
|
|
||||||
log.warn('missing or invalid timestamp',
|
|
||||||
{ timestamp: queryObj['X-Amz-Date'] });
|
|
||||||
return authParams;
|
|
||||||
}
|
|
||||||
|
|
||||||
const expiry = Number.parseInt(queryObj['X-Amz-Expires'] ?? 'nope', 10);
|
|
||||||
const sevenDays = 604800;
|
|
||||||
if (expiry && (expiry > 0 && expiry <= sevenDays)) {
|
|
||||||
authParams.expiry = expiry;
|
|
||||||
} else {
|
|
||||||
log.warn('invalid expiry', { expiry });
|
|
||||||
return authParams;
|
|
||||||
}
|
|
||||||
|
|
||||||
const credential = queryObj['X-Amz-Credential'];
|
|
||||||
if (credential && credential.length > 28 && credential.indexOf('/') > -1) {
|
|
||||||
// @ts-ignore
|
|
||||||
authParams.credential = credential.split('/');
|
|
||||||
} else {
|
|
||||||
log.warn('invalid credential param', { credential });
|
|
||||||
return authParams;
|
|
||||||
}
|
|
||||||
return authParams;
|
|
||||||
}

/**
 * Extract and validate components from auth header
 * @param authHeader - authorization header from request
 * @param log - logging object
 * @return object containing extracted auth header items for authV4
 */
export function extractAuthItems(authHeader: string, log: Logger) {
    const authItems: {
        credentialsArr?: [string, string, string, string, string];
        signedHeaders?: string;
        signatureFromRequest?: string;
    } = {};
    const authArray = authHeader.replace('AWS4-HMAC-SHA256 ', '').split(',');

    if (authArray.length < 3) {
        return authItems;
    }
    // extract authorization components
    const credentialStr = authArray[0];
    const signedHeadersStr = authArray[1];
    const signatureStr = authArray[2];
    log.trace('credentials from request', { credentialStr });
    if (
        credentialStr &&
        credentialStr.trim().startsWith('Credential=') &&
        credentialStr.indexOf('/') > -1
    ) {
        // @ts-ignore
        authItems.credentialsArr = credentialStr
            .trim().replace('Credential=', '').split('/');
    } else {
        log.warn('missing credentials');
    }
    log.trace('signed headers from request', { signedHeadersStr });
    if (signedHeadersStr && signedHeadersStr.trim()
        .startsWith('SignedHeaders=')) {
        authItems.signedHeaders = signedHeadersStr
            .trim().replace('SignedHeaders=', '');
    } else {
        log.warn('missing signed headers');
    }
    log.trace('signature from request', { signatureStr });
    if (signatureStr && signatureStr.trim().startsWith('Signature=')) {
        authItems.signatureFromRequest = signatureStr
            .trim().replace('Signature=', '');
    } else {
        log.warn('missing signature');
    }
    return authItems;
}

/**
 * Checks whether the signed headers include the host header
 * and all x-amz- and x-scal- headers in request
 * @param signedHeaders - signed headers sent with request
 * @param allHeaders - request.headers
 * @return true if host and all x-amz- and x-scal- headers are included,
 * false otherwise
 */
export function areSignedHeadersComplete(signedHeaders: string, allHeaders: Headers) {
    const signedHeadersList = signedHeaders.split(';');
    if (signedHeadersList.indexOf('host') === -1) {
        return false;
    }
    const headers = Object.keys(allHeaders);
    for (let i = 0; i < headers.length; i++) {
        if ((headers[i].startsWith('x-amz-')
            || headers[i].startsWith('x-scal-'))
            && signedHeadersList.indexOf(headers[i]) === -1) {
            return false;
        }
    }
    return true;
}
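
// Hedged example (illustrative only, not from the original file): checking
// signed-header completeness for a request carrying one x-amz- header. Header
// names are assumed lowercased by the HTTP layer, as node.js does for
// IncomingMessage.headers.
//
// areSignedHeadersComplete(
//     'host;x-amz-content-sha256;x-amz-date',
//     { host: 'example.com', 'x-amz-date': '20230101T000000Z' },
// ); // => true: 'host' is signed and every x-amz-/x-scal- header is covered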

@ -1,569 +0,0 @@
import cluster, { Worker } from 'cluster';
import * as werelogs from 'werelogs';

import { default as errors } from '../../lib/errors';

const rpcLogger = new werelogs.Logger('ClusterRPC');

/**
 * Remote procedure calls support between cluster workers.
 *
 * When using the cluster module, new processes are forked and are
 * dispatched workloads, usually HTTP requests. The ClusterRPC module
 * implements an RPC system to send commands to all cluster worker
 * processes at once from any particular worker, and retrieve their
 * individual command results, like a distributed map operation.
 *
 * The existing nodejs cluster IPC channel is set up from the primary
 * to each worker, but not between workers, so there has to be a hop
 * by the primary.
 *
 * How a command is treated:
 *
 * - a worker sends a command message to the primary
 *
 * - the primary then forwards that command to each existing worker
 *   (including the requestor)
 *
 * - each worker then executes the command and returns a result or an
 *   error
 *
 * - the primary gathers all workers results into an array
 *
 * - finally, the primary dispatches the results array to the original
 *   requesting worker
 *
 *
 * Limitations:
 *
 * - The command payload must be serializable, which means that:
 *   - it should not contain circular references
 *   - it should be of a reasonable size to be sent in a single RPC message
 *
 * - The "toWorkers" parameter of value "*" targets the set of workers
 *   that are available at the time the command is dispatched. Any new
 *   worker spawned after the command has been dispatched for
 *   processing, but before the command completes, doesn't execute
 *   the command and hence is not part of the results array.
 *
 *
 * To set it up:
 *
 * - On the primary:
 *   if (cluster.isPrimary) {
 *       setupRPCPrimary();
 *   }
 *
 * - On the workers:
 *   if (!cluster.isPrimary) {
 *       setupRPCWorker({
 *           handler1: (payload: object, uids: string, callback: HandlerCallback) => void,
 *           handler2: ...
 *       });
 *   }
 *   Handler functions will be passed the command payload, request
 *   serialized uids, and must call the callback when the worker is done
 *   processing the command:
 *   callback(error: Error | null | undefined, result?: any)
 *
 * When this setup is done, any worker can start sending commands by calling
 * the async function sendWorkerCommand().
 */

// exported types

export type ResultObject = {
    error: Error | null;
    result: any;
};

/**
 * saved Promise for sendWorkerCommand
 */
export type CommandPromise = {
    resolve: (results?: ResultObject[]) => void;
    reject: (error: Error) => void;
    timeout: NodeJS.Timeout | null;
};
export type HandlerCallback = (error: (Error & { code?: number }) | null | undefined, result?: any) => void;
export type HandlerFunction = (payload: object, uids: string, callback: HandlerCallback) => void;
export type HandlersMap = {
    [index: string]: HandlerFunction;
};
export type PrimaryHandlerFunction = (worker: Worker, payload: object, uids: string, callback: HandlerCallback) => void;
export type PrimaryHandlersMap = Record<string, PrimaryHandlerFunction>;

// private types

type RPCMessage<T extends string, P> = {
    type: T;
    uids: string;
    payload: P;
};

type RPCCommandMessage = RPCMessage<'cluster-rpc:command', any> & {
    toWorkers: string;
    toHandler: string;
};

type MarshalledResultObject = {
    error: string | null;
    errorCode?: number;
    result: any;
};

type RPCCommandResultMessage = RPCMessage<'cluster-rpc:commandResult', MarshalledResultObject>;

type RPCCommandResultsMessage = RPCMessage<'cluster-rpc:commandResults', {
    results: MarshalledResultObject[];
}>;

type RPCCommandErrorMessage = RPCMessage<'cluster-rpc:commandError', {
    error: string;
}>;

interface RPCSetupOptions {
    /**
     * As werelogs is not a peerDependency, arsenal and a parent project
     * might have their own separate versions duplicated in dependencies.
     * The config is therefore not shared.
     * Use this to propagate werelogs config to arsenal's ClusterRPC.
     */
    werelogsConfig?: Parameters<typeof werelogs.configure>[0];
}

/**
 * In primary: store worker IDs that are waiting to be dispatched
 * their command's results, as a mapping.
 */
const uidsToWorkerId: {
    [index: string]: number;
} = {};


/**
 * In primary: store worker responses for commands in progress as a
 * mapping.
 *
 * Result objects are 'null' while the worker is still processing the
 * command. When a worker finishes processing it stores the result as:
 *     {
 *         error: string | null,
 *         result: any
 *     }
 */
const uidsToCommandResults: {
    [index: string]: {
        [index: number]: MarshalledResultObject | null;
    };
} = {};

/**
 * In workers: store promise callbacks for commands waiting to be
 * dispatched, as a mapping.
 */
const uidsToCommandPromise: {
    [index: string]: CommandPromise;
} = {};


function _isRpcMessage(message) {
    return (message !== null &&
        typeof message === 'object' &&
        typeof message.type === 'string' &&
        message.type.startsWith('cluster-rpc:'));
}

/**
 * Setup cluster RPC system on the primary
 *
 * @param {object} [handlers] - mapping of handler names to handler functions
 *     handler function:
 *     `handler({Worker} worker, {object} payload, {string} uids, {function} callback)`
 *     handler callback must be called when worker is done with the command:
 *     `callback({Error|null} error, {any} [result])`
 * @return {undefined}
 */
export function setupRPCPrimary(handlers?: PrimaryHandlersMap, options?: RPCSetupOptions) {
    if (options?.werelogsConfig) {
        werelogs.configure(options.werelogsConfig);
    }
    cluster.on('message', (worker, message) => {
        if (_isRpcMessage(message)) {
            _handlePrimaryMessage(worker, message, handlers);
        }
    });
}

/**
 * Setup RPCs on a cluster worker process
 *
 * @param {object} handlers - mapping of handler names to handler functions
 *     handler function:
 *     handler({object} payload, {string} uids, {function} callback)
 *     handler callback must be called when worker is done with the command:
 *     callback({Error|null} error, {any} [result])
 * @return {undefined}
 */
export function setupRPCWorker(handlers: HandlersMap, options?: RPCSetupOptions) {
    if (!process.send) {
        throw new Error('fatal: cannot setup cluster RPC: "process.send" is not available');
    }
    if (options?.werelogsConfig) {
        werelogs.configure(options.werelogsConfig);
    }
    process.on('message', (message: RPCCommandMessage | RPCCommandResultsMessage) => {
        if (_isRpcMessage(message)) {
            _handleWorkerMessage(message, handlers);
        }
    });
}

/**
 * Send a command for workers to execute in parallel, and wait for results
 *
 * @param {string} toWorkers - which workers should execute the command
 *     Currently the supported values are:
 *     - "*", meaning all workers will execute the command
 *     - "PRIMARY", meaning primary process will execute the command
 * @param {string} toHandler - name of handler that will execute the
 *     command in workers, as declared in setupRPCWorker() parameter object
 * @param {string} uids - unique identifier of the command, must be
 *     unique across all commands in progress
 * @param {object} payload - message payload, sent as-is to the handler
 * @param {number} [timeoutMs=60000] - timeout the command with a
 *     "RequestTimeout" error after this number of milliseconds - set to 0
 *     to disable timeouts (the command may then hang forever)
 * @returns {Promise}
 */
export async function sendWorkerCommand(
    toWorkers: string,
    toHandler: string,
    uids: string,
    payload: object,
    timeoutMs: number = 60000
) {
    if (typeof uids !== 'string') {
        rpcLogger.error('missing or invalid "uids" field', { uids });
        throw errors.MissingParameter;
    }
    if (uidsToCommandPromise[uids] !== undefined) {
        rpcLogger.error('a command is already in progress with same uids', { uids });
        throw errors.OperationAborted;
    }
    rpcLogger.info('sending command', { toWorkers, toHandler, uids, payload });
    return new Promise((resolve, reject) => {
        let timeout: NodeJS.Timeout | null = null;
        if (timeoutMs) {
            timeout = setTimeout(() => {
                delete uidsToCommandPromise[uids];
                reject(errors.RequestTimeout);
            }, timeoutMs);
        }
        uidsToCommandPromise[uids] = { resolve, reject, timeout };
        const message: RPCCommandMessage = {
            type: 'cluster-rpc:command',
            toWorkers,
            toHandler,
            uids,
            payload,
        };
        return process.send?.(message);
    });
}

/**
 * Get the number of commands in flight
 * @returns {number}
 */
export function getPendingCommandsCount() {
    return Object.keys(uidsToCommandPromise).length;
}


function _dispatchCommandResultsToWorker(
    worker: Worker,
    uids: string,
    resultsArray: MarshalledResultObject[]
): void {
    const message: RPCCommandResultsMessage = {
        type: 'cluster-rpc:commandResults',
        uids,
        payload: {
            results: resultsArray,
        },
    };
    worker.send(message);
}

function _dispatchCommandErrorToWorker(
    worker: Worker,
    uids: string,
    error: Error,
): void {
    const message: RPCCommandErrorMessage = {
        type: 'cluster-rpc:commandError',
        uids,
        payload: {
            error: error.message,
        },
    };
    worker.send(message);
}

function _sendPrimaryCommandResult(
    worker: Worker,
    uids: string,
    error: (Error & { code?: number }) | null | undefined,
    result?: any
): void {
    const message: RPCCommandResultsMessage = {
        type: 'cluster-rpc:commandResults',
        uids,
        payload: {
            results: [{ error: error?.message || null, errorCode: error?.code, result }],
        },
    };
    worker.send?.(message);
}

function _handlePrimaryCommandMessage(
    fromWorker: Worker,
    logger: any,
    message: RPCCommandMessage,
    handlers?: PrimaryHandlersMap
): void {
    const { toWorkers, toHandler, uids, payload } = message;
    if (toWorkers === '*') {
        if (uidsToWorkerId[uids] !== undefined) {
            logger.warn('new command already has a waiting worker with same uids', {
                uids, workerId: uidsToWorkerId[uids],
            });
            return undefined;
        }
        const commandResults = {};
        for (const workerId of Object.keys(cluster.workers || {})) {
            commandResults[workerId] = null;
        }
        uidsToWorkerId[uids] = fromWorker?.id;
        uidsToCommandResults[uids] = commandResults;

        for (const [workerId, worker] of Object.entries(cluster.workers || {})) {
            logger.debug('sending command message to worker', {
                workerId, toHandler, payload,
            });
            if (worker) {
                worker.send(message);
            }
        }
    } else if (toWorkers === 'PRIMARY') {
        const { toHandler, uids, payload } = message;
        const cb: HandlerCallback = (err, result) => _sendPrimaryCommandResult(fromWorker, uids, err, result);

        if (toHandler in (handlers || {})) {
            return handlers![toHandler](fromWorker, payload, uids, cb);
        }
        logger.error('no such handler in "toHandler" field from worker command message', {
            toHandler,
        });
        return cb(errors.NotImplemented);
    } else {
        logger.error('unsupported "toWorkers" field from worker command message', {
            toWorkers,
        });
        if (fromWorker) {
            _dispatchCommandErrorToWorker(fromWorker, uids, errors.NotImplemented);
        }
    }
}

function _handlePrimaryCommandResultMessage(
    fromWorkerId: number,
    logger: any,
    message: RPCCommandResultMessage
): void {
    const { uids, payload } = message;
    const commandResults = uidsToCommandResults[uids];
    if (!commandResults) {
        logger.warn('received command response message from worker for command not in flight', {
            workerId: fromWorkerId,
            uids,
        });
        return undefined;
    }
    if (commandResults[fromWorkerId] === undefined) {
        logger.warn('received command response message with unexpected worker ID', {
            workerId: fromWorkerId,
            uids,
        });
        return undefined;
    }
    if (commandResults[fromWorkerId] !== null) {
        logger.warn('ignoring duplicate command response from worker', {
            workerId: fromWorkerId,
            uids,
        });
        return undefined;
    }
    commandResults[fromWorkerId] = payload;
    const commandResultsArray = Object.values(commandResults);
    if (commandResultsArray.every(response => response !== null)) {
        logger.debug('all workers responded to command', { uids });
        const completeCommandResultsArray = <MarshalledResultObject[]> commandResultsArray;
        const toWorkerId = uidsToWorkerId[uids];
        const toWorker = cluster.workers?.[toWorkerId];

        delete uidsToCommandResults[uids];
        delete uidsToWorkerId[uids];

        if (!toWorker) {
            logger.warn('worker shut down while its command was executing', {
                workerId: toWorkerId, uids,
            });
            return undefined;
        }
        // send back response to original worker
        _dispatchCommandResultsToWorker(toWorker, uids, completeCommandResultsArray);
    }
}

function _handlePrimaryMessage(
    fromWorker: Worker,
    message: RPCCommandMessage | RPCCommandResultMessage,
    handlers?: PrimaryHandlersMap
): void {
    const { type: messageType, uids } = message;
    const logger = rpcLogger.newRequestLoggerFromSerializedUids(uids);
    logger.debug('primary received message from worker', {
        workerId: fromWorker?.id, rpcMessage: message,
    });
    if (messageType === 'cluster-rpc:command') {
        return _handlePrimaryCommandMessage(fromWorker, logger, message, handlers);
    }
    if (messageType === 'cluster-rpc:commandResult') {
        return _handlePrimaryCommandResultMessage(fromWorker?.id, logger, message);
    }
    logger.error('unsupported message type', {
        workerId: fromWorker?.id, messageType, uids,
    });
    return undefined;
}

function _sendWorkerCommandResult(
    uids: string,
    error: Error | null | undefined,
    result?: any
): void {
    const message: RPCCommandResultMessage = {
        type: 'cluster-rpc:commandResult',
        uids,
        payload: {
            error: error ? error.message : null,
            result,
        },
    };
    process.send?.(message);
}

function _handleWorkerCommandMessage(
    logger: any,
    message: RPCCommandMessage,
    handlers: HandlersMap
): void {
    const { toHandler, uids, payload } = message;
    const cb: HandlerCallback = (err, result) => _sendWorkerCommandResult(uids, err, result);

    if (toHandler in handlers) {
        return handlers[toHandler](payload, uids, cb);
    }
    logger.error('no such handler in "toHandler" field from worker command message', {
        toHandler,
    });
    return cb(errors.NotImplemented);
}

function _handleWorkerCommandResultsMessage(
    logger: any,
    message: RPCCommandResultsMessage,
): void {
    const { uids, payload } = message;
    const { results } = payload;
    const commandPromise: CommandPromise = uidsToCommandPromise[uids];
    if (commandPromise === undefined) {
        logger.error('missing promise for command results', { uids, payload });
        return undefined;
    }
    if (commandPromise.timeout) {
        clearTimeout(commandPromise.timeout);
    }
    delete uidsToCommandPromise[uids];
    const unmarshalledResults = results.map(workerResult => {
        let workerError: Error | null = null;
        if (workerResult.error) {
            if (workerResult.error in errors) {
                workerError = errors[workerResult.error];
            } else {
                workerError = new Error(workerResult.error);
            }
        }
        if (workerError && workerResult.errorCode) {
            (workerError as Error & { code: number }).code = workerResult.errorCode;
        }
        const unmarshalledResult: ResultObject = {
            error: workerError,
            result: workerResult.result,
        };
        return unmarshalledResult;
    });
    return commandPromise.resolve(unmarshalledResults);
}

function _handleWorkerCommandErrorMessage(
    logger: any,
    message: RPCCommandErrorMessage,
): void {
    const { uids, payload } = message;
    const { error } = payload;
    const commandPromise: CommandPromise = uidsToCommandPromise[uids];
    if (commandPromise === undefined) {
        logger.error('missing promise for command results', { uids, payload });
        return undefined;
    }
    if (commandPromise.timeout) {
        clearTimeout(commandPromise.timeout);
    }
    delete uidsToCommandPromise[uids];
    let commandError: Error | null = null;
    if (error in errors) {
        commandError = errors[error];
    } else {
        commandError = new Error(error);
    }
    return commandPromise.reject(<Error> commandError);
}

function _handleWorkerMessage(
    message: RPCCommandMessage | RPCCommandResultsMessage | RPCCommandErrorMessage,
    handlers: HandlersMap
): void {
    const { type: messageType, uids } = message;
    const workerId = cluster.worker?.id;
    const logger = rpcLogger.newRequestLoggerFromSerializedUids(uids);
    logger.debug('worker received message from primary', {
        workerId, rpcMessage: message,
    });
    if (messageType === 'cluster-rpc:command') {
        return _handleWorkerCommandMessage(logger, message, handlers);
    }
    if (messageType === 'cluster-rpc:commandResults') {
        return _handleWorkerCommandResultsMessage(logger, message);
    }
    if (messageType === 'cluster-rpc:commandError') {
        return _handleWorkerCommandErrorMessage(logger, message);
    }
    logger.error('unsupported message type', {
        workerId, messageType,
    });
    return undefined;
}

lib/constants.ts
@ -1,177 +0,0 @@
import * as crypto from 'crypto';

// The min value here is to manage further backward compat if we
// need it
// Default value
export const vaultGeneratedIamSecurityTokenSizeMin = 128;
// Safe to assume that a typical token size is less than 8192 bytes
export const vaultGeneratedIamSecurityTokenSizeMax = 8192;
// Base-64
export const vaultGeneratedIamSecurityTokenPattern = /^[A-Za-z0-9/+=]*$/;

// info about the iam security token
export const iamSecurityToken = {
    min: vaultGeneratedIamSecurityTokenSizeMin,
    max: vaultGeneratedIamSecurityTokenSizeMax,
    pattern: vaultGeneratedIamSecurityTokenPattern,
};
// PublicId is used as the canonicalID for a request that contains
// no authentication information. Requestor can access
// only public resources
export const publicId = 'http://acs.amazonaws.com/groups/global/AllUsers';
export const zenkoServiceAccount = 'http://acs.zenko.io/accounts/service';
export const metadataFileNamespace = '/MDFile';
export const dataFileURL = '/DataFile';
export const passthroughFileURL = '/PassthroughFile';
// AWS states max size for user-defined metadata
// (x-amz-meta- headers) is 2 KB:
// http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html
// In testing, AWS seems to allow up to 88 more bytes,
// so we do the same.
export const maximumMetaHeadersSize = 2136;
export const emptyFileMd5 = 'd41d8cd98f00b204e9800998ecf8427e';
// Version 2 changes the format of the data location property
// Version 3 adds the dataStoreName attribute
// Version 4 adds the Creation-Time and Content-Language attributes,
// and adds support for x-ms-meta-* headers in UserMetadata
// Version 5 adds the azureInfo structure
// Version 6 adds a "deleted" flag that is updated to true before
// the object gets deleted. This is done to keep object metadata in the
// oplog when deleting the object, as oplog deletion events don't contain
// any metadata of the object.
// Version 6 also adds the "isPHD" flag that is used to indicate that the master
// object is a placeholder and is not up to date.
export const mdModelVersion = 6;
/*
 * Splitter is used to build the object name for the overview of a
 * multipart upload and to build the object names for each part of a
 * multipart upload. These objects with large names are then stored in
 * metadata in a "shadow bucket" to a real bucket. The shadow bucket
 * contains all ongoing multipart uploads. We include in the object
 * name some of the info we might need to pull about an open multipart
 * upload or about an individual part with each piece of info separated
 * by the splitter. We can then extract each piece of info by splitting
 * the object name string with this splitter.
 * For instance, assuming a splitter of '...!*!',
 * the name of the upload overview would be:
 *   overview...!*!objectKey...!*!uploadId
 * For instance, the name of a part would be:
 *   uploadId...!*!partNumber
 *
 * The sequence of characters used in the splitter should not occur
 * elsewhere in the pieces of info to avoid splitting where not
 * intended.
 *
 * Splitter is also used in adding bucketnames to the
 * namespaceusersbucket. The object names added to the
 * namespaceusersbucket are of the form:
 *   canonicalID...!*!bucketname
 */

export const splitter = '..|..';
export const usersBucket = 'users..bucket';
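
// Illustrative sketch (not part of the original file; the objectKey and
// uploadId values are invented): how the splitter composes and decomposes
// MPU-related keys.
//
// const overviewKey = ['overview', 'photos/cat.jpg', 'uploadId123'].join(splitter);
// // => 'overview..|..photos/cat.jpg..|..uploadId123'
// const [, objectKey, uploadId] = overviewKey.split(splitter);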

// MPU Bucket Prefix is used to create the name of the shadow
// bucket used for multipart uploads. There is one shadow mpu
// bucket per bucket and its name is the mpuBucketPrefix followed
// by the name of the final destination bucket for the object
// once the multipart upload is complete.
export const mpuBucketPrefix = 'mpuShadowBucket';
// since aws s3 does not allow capitalized buckets, these may be
// used for special internal purposes
export const permittedCapitalizedBuckets = {
    METADATA: true,
};
// Setting a lower object key limit to account for:
// - Mongo key limit of 1012 bytes
// - Version ID in Mongo Key if versioned of 33
// - Max bucket name length if bucket match false of 63
// - Extra prefix slash for bucket prefix if bucket match of 1
export const objectKeyByteLimit = 915;
/* delimiter for location-constraint. The location constraint will be able
 * to include the ingestion flag
 */
export const zenkoSeparator = ':';
/* eslint-disable camelcase */
export const externalBackends = { aws_s3: true, azure: true, gcp: true, pfs: true };
export const replicationBackends = { aws_s3: true, azure: true, gcp: true };
// hex digest of sha256 hash of empty string:
export const emptyStringHash = crypto.createHash('sha256')
    .update('', 'binary').digest('hex');
export const mpuMDStoredExternallyBackend = { aws_s3: true, gcp: true };
// AWS sets a minimum size limit for parts except for the last part.
// http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
export const minimumAllowedPartSize = 5242880;
export const gcpMaximumAllowedPartCount = 1024;
// GCP Object Tagging Prefix
export const gcpTaggingPrefix = 'aws-tag-';
export const productName = 'APN/1.0 Scality/1.0 Scality CloudServer for Zenko';
export const legacyLocations = ['sproxyd', 'legacy'];
// healthcheck default call from nginx is every 2 seconds
// for external backends, don't call unless at least 1 minute
// (60,000 milliseconds) since last call
export const externalBackendHealthCheckInterval = 60000;
// some of the available data backends (if called directly rather
// than through the multiple backend gateway) need a key provided
// as a string as first parameter of the get/delete methods.
export const clientsRequireStringKey = { sproxyd: true, cdmi: true };
export const hasCopyPartBackends = { aws_s3: true, gcp: true };
export const versioningNotImplBackends = { azure: true, gcp: true };
// user metadata applied on zenko-created objects
export const zenkoIDHeader = 'x-amz-meta-zenko-instance-id';
// Default expiration value of the S3 pre-signed URL duration
// 604800 seconds (seven days).
export const defaultPreSignedURLExpiry = 7 * 24 * 60 * 60;
// Regex for ISO-8601 formatted date
export const shortIso8601Regex = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z/;
export const longIso8601Regex = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z/;
export const supportedNotificationEvents = new Set([
    's3:ObjectCreated:*',
    's3:ObjectCreated:Put',
    's3:ObjectCreated:Copy',
    's3:ObjectCreated:CompleteMultipartUpload',
    's3:ObjectRemoved:*',
    's3:ObjectRemoved:Delete',
    's3:ObjectRemoved:DeleteMarkerCreated',
    's3:Replication:OperationFailedReplication',
    's3:ObjectTagging:*',
    's3:ObjectTagging:Put',
    's3:ObjectTagging:Delete',
    's3:ObjectAcl:Put',
    's3:ObjectRestore:*',
    's3:ObjectRestore:Post',
    's3:ObjectRestore:Completed',
    's3:ObjectRestore:Delete',
    's3:LifecycleTransition',
    's3:LifecycleExpiration:*',
    's3:LifecycleExpiration:DeleteMarkerCreated',
    's3:LifecycleExpiration:Delete',
]);
export const notificationArnPrefix = 'arn:scality:bucketnotif';
// HTTP server keep-alive timeout is set to a higher value than
// client's free sockets timeout to avoid the risk of triggering
// ECONNRESET errors if the server closes the connection at the
// exact moment clients attempt to reuse an established connection
// for a new request.
//
// Note: the ability to close inactive connections on the client
// after httpClientFreeSocketsTimeout milliseconds requires the
// use of "agentkeepalive" module instead of the regular node.js
// http.Agent.
export const httpServerKeepAliveTimeout = 60000;
export const httpClientFreeSocketTimeout = 55000;
export const supportedLifecycleRules = [
    'expiration',
    'noncurrentVersionExpiration',
    'abortIncompleteMultipartUpload',
    'transitions',
    'noncurrentVersionTransition',
];
// Maximum number of buckets to cache (bucket metadata)
export const maxCachedBuckets = process.env.METADATA_MAX_CACHED_BUCKETS ?
    Number(process.env.METADATA_MAX_CACHED_BUCKETS) : 1000;

export const validRestoreObjectTiers = new Set(['Expedited', 'Standard', 'Bulk']);
export const maxBatchingConcurrentOperations = 5;

/** For policy resource arn check we allow empty account ID to not break compatibility */
export const policyArnAllowedEmptyAccountId = ['utapi', 'scuba'];

lib/db.ts
@ -1,194 +0,0 @@
/**
 * Like Error, but with a property set to true.
 * TODO: this is copied from kineticlib, should consolidate with the
 * future errors module
 *
 * Example: instead of:
 *     const err = new Error("input is not a buffer");
 *     err.badTypeInput = true;
 *     throw err;
 * use:
 *     throw propError("badTypeInput", "input is not a buffer");
 *
 * @param propName - the property name.
 * @param message - the Error message.
 * @returns the Error object.
 */
function propError(propName: string, message: string): Error {
    const err = new Error(message);
    err[propName] = true;
    // @ts-ignore
    err.is = { [propName]: true };
    return err;
}

/**
 * Running transaction with multiple updates to be committed atomically
 */
export class IndexTransaction {
    operations: { type: 'put' | 'del'; key: string; value?: any }[];
    db: any;
    closed: boolean;
    conditions: { [key: string]: string }[];

    /**
     * Builds a new transaction
     *
     * @argument {Leveldb} db an open database to which the updates
     *                     will be applied
     *
     * @returns a new empty transaction
     */
    constructor(db: any) {
        this.operations = [];
        this.db = db;
        this.closed = false;
        this.conditions = [];
    }

    /**
     * Adds a new operation to participate in this running transaction
     *
     * @argument op an object with the following attributes:
     *     {
     *         type: 'put' or 'del',
     *         key: the object key,
     *         value: (optional for del) the value to store,
     *     }
     *
     * @throws an error described by the following properties
     *     - invalidTransactionVerb if op is not put or del
     *     - pushOnCommittedTransaction if already committed
     *     - missingKey if the key is missing from the op
     *     - missingValue if putting without a value
     */
    push(op: { type: 'put'; key: string; value: any }): void;
    push(op: { type: 'del'; key: string }): void;
    push(op: { type: 'put' | 'del'; key: string; value?: any }): void {
        if (this.closed) {
            throw propError(
                'pushOnCommittedTransaction',
                'can not add ops to already committed transaction'
            );
        }

        if (op.type !== 'put' && op.type !== 'del') {
            throw propError(
                'invalidTransactionVerb',
                `unknown action type: ${op.type}`
            );
        }

        if (op.key === undefined) {
            throw propError('missingKey', 'missing key');
        }

        if (op.type === 'put' && op.value === undefined) {
            throw propError('missingValue', 'missing value');
        }

        this.operations.push(op);
    }

    /**
     * Adds a new put operation to this running transaction
     *
     * @argument {string} key - the key of the object to put
     * @argument {string} value - the value to put
     *
     * @throws {Error} an error described by the following properties
     *     - pushOnCommittedTransaction if already committed
     *     - missingKey if the key is missing from the op
     *     - missingValue if putting without a value
     * @see push
     */
    put(key: string, value: any) {
        this.push({ type: 'put', key, value });
    }

    /**
     * Adds a new del operation to this running transaction
     *
     * @argument key - the key of the object to delete
     *
     * @throws an error described by the following properties
     *     - pushOnCommittedTransaction if already committed
     *     - missingKey if the key is missing from the op
     *
     * @see push
     */
    del(key: string) {
        this.push({ type: 'del', key });
    }

    /**
     * Adds a condition for the transaction
     *
     * @argument condition an object with the following attributes:
     *     {
     *         <condition>: the object key
     *     }
     *     example: { notExists: 'key1' }
     *
     * @throws an error described by the following properties
     *     - pushOnCommittedTransaction if already committed
     *     - missingCondition if the condition is empty
     */
    addCondition(condition: { [key: string]: string }) {
        if (this.closed) {
            throw propError(
                'pushOnCommittedTransaction',
                'can not add conditions to already committed transaction'
            );
        }
        if (condition === undefined || Object.keys(condition).length === 0) {
            throw propError(
                'missingCondition',
                'missing condition for conditional put'
            );
        }
        if (typeof condition.notExists !== 'string' && typeof condition.exists !== 'string') {
            throw propError(
                'unsupportedConditionalOperation',
                'missing key or supported condition'
            );
        }
        this.conditions.push(condition);
    }

    /**
     * Applies the queued updates in this transaction atomically.
     *
     * @argument cb function to be called when the commit
     *              finishes, taking an optional error argument
     */
    commit(cb: (error: Error | null, data?: any) => void) {
        if (this.closed) {
            return cb(
                propError(
                    'alreadyCommitted',
                    'transaction was already committed'
                )
            );
        }

        if (this.operations.length === 0) {
            return cb(
                propError(
                    'emptyTransaction',
                    'tried to commit an empty transaction'
                )
            );
        }

        this.closed = true;
        const options = { sync: true, conditions: this.conditions };

        // The array-of-operations variant of the `batch` method
        // allows passing options such as `sync: true` whereas the
        // chained form does not.
        return this.db.batch(this.operations, options, cb);
    }
}
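
// A minimal usage sketch (illustrative, not part of the original file),
// assuming `db` is an open leveldb-style handle exposing the
// array-of-operations `batch(ops, options, cb)` variant used by commit():
//
// const transaction = new IndexTransaction(db);
// transaction.put('key1', 'value1');
// transaction.del('key2');
// transaction.addCondition({ notExists: 'key1' });
// transaction.commit(err => {
//     if (err) {
//         // e.g. err.pushOnCommittedTransaction, err.emptyTransaction, ...
//     }
// });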

@ -1,11 +0,0 @@
export interface ErrorLike {
    message: any;
    code: any;
    stack: any;
    name: any;
}

export function reshapeExceptionError(error: ErrorLike) {
    const { message, code, stack, name } = error;
    return { message, code, stack, name };
}

@ -1,175 +0,0 @@
import type { ServerResponse } from 'http';
import * as rawErrors from './arsenalErrors';

/** All possible errors names. */
export type Name = keyof typeof rawErrors;
/** Object containing all errors names. It has the format { [Name]: "Name" } */
export type Names = { [Name_ in Name]: Name_ };
/** Mapping used to determine an error type. It has the format { [Name]: boolean } */
export type Is = { [_ in Name]: boolean };
/** Mapping of all possible Errors. It has the format { [Name]: Error } */
export type Errors = { [_ in Name]: ArsenalError };

// This object is reused constantly through createIs, we store it there
// to avoid recomputation.
const isBase = Object.fromEntries(
    Object.keys(rawErrors).map((key) => [key, false])
) as Is;

// This allows us to conditionally add the old behavior of errors to properly
// test migration.
// Activate CI tests with `ALLOW_UNSAFE_ERROR_COMPARISON=false yarn test`.
// Remove this mechanism in ARSN-176.
export const allowUnsafeErrComp = (
    process.env.ALLOW_UNSAFE_ERROR_COMPARISON ?? 'true') === 'true';

// This contains some metaprog. Be careful.
// Proxy can be found on MDN.
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Proxy
// While it could seem better to avoid metaprog, this allows us to enforce
// type-checking properly while avoiding all errors that could happen at runtime.
// Even if some errors are made in JavaScript, like using err.is.NonExistingError,
// the Proxy will return false.
const createIs = (type: Name): Is => {
    const get = (is: Is, value: string | symbol) => is[value] ?? false;
    const final = Object.freeze({ ...isBase, [type]: true });
    return new Proxy(final, { get });
};

export class ArsenalError extends Error {
    /** HTTP status code. Example: 401, 403, 500, ... */
    #code: number;
    /** Text description of the error. */
    #description: string;
    /** Type of the error. */
    #type: Name;
    /** Object used to determine the error type.
     * Example: error.is.InternalError */
    #is: Is;
    /** A map of error metadata (can be extra fields
     * that only show in debug mode) */
    #metadata: Map<string, Object[]>;

    private constructor(type: Name, code: number, description: string,
        metadata?: Map<string, Object[]>) {
        super(type);
        this.#code = code;
        this.#description = description;
        this.#type = type;
        this.#is = createIs(type);
        this.#metadata = metadata ?? new Map<string, Object[]>();

        // This restores the old behavior of errors, to make sure they're now
        // backward-compatible. Fortunately it's handled by TS, but it cannot
        // be type-checked. This means we have to be extremely careful about
        // what we're doing when using errors.
        // Disables the feature when in CI tests but not in production.
        if (allowUnsafeErrComp) {
            this[type] = true;
        }
    }

    /** Output the error as a JSON string */
    toString() {
        const errorType = this.message;
        const errorMessage = this.#description;
        return JSON.stringify({ errorType, errorMessage });
    }

    flatten() {
        return {
            is_arsenal_error: true,
            code: this.#code,
            description: this.#description,
            type: this.#type,
            stack: this.stack,
        };
    }

    static unflatten(flat_obj) {
        if (!flat_obj.is_arsenal_error) {
            return null;
        }

        const err = new ArsenalError(
            flat_obj.type,
            flat_obj.code,
            flat_obj.description
        );
        err.stack = flat_obj.stack;
        return err;
    }

    /** Write the error in an HTTP response */
    writeResponse(res: ServerResponse) {
        res.writeHead(this.#code);
        const asStr = this.toString();
        res.end(asStr);
    }

    /** Clone the error with a new description. */
    customizeDescription(description: string): ArsenalError {
        const type = this.#type;
        const code = this.#code;
        const metadata = new Map(this.#metadata);
        const err = new ArsenalError(type, code, description, metadata);
        err.stack = this.stack;
        return err;
    }

    /** Clone the error with a new metadata field */
    addMetadataEntry(key: string, value: Object[]): ArsenalError {
        const type = this.#type;
        const code = this.#code;
        const description = this.#description;
        const metadata = new Map(this.#metadata);
        metadata.set(key, value);
        const err = new ArsenalError(type, code, description, metadata);
        err.stack = this.stack;
        return err;
    }

    /** Used to determine the error type. Example: error.is.InternalError */
    get is() {
        return this.#is;
    }

    /** HTTP status code. Example: 401, 403, 500, ... */
    get code() {
        return this.#code;
    }

    /** Text description of the error. */
    get description() {
        return this.#description;
    }

    /**
     * Type of the error, belonging to Name. `is` should be preferred over
     * `type` on a day-to-day basis, but `type` remains accessible for future use. */
    get type() {
        return this.#type;
    }

    /** A map of error metadata */
    get metadata() {
        return this.#metadata;
    }

    /** Generate all possible errors. An instance is created by default. */
    static errors() {
        const errors = {};
        Object.entries(rawErrors).forEach((value) => {
            const name = value[0] as Name;
            const error = value[1];
            const { code, description } = error;
            const get = () => new ArsenalError(name, code, description);
            Object.defineProperty(errors, name, { get });
        });
        return errors as Errors;
    }
}

/** Mapping of all possible Errors.
 * Use them with errors[error].customizeDescription for any customization. */
export default ArsenalError.errors();
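
// Hedged usage sketch (not part of the original file; the import path is
// assumed): each property access on the exported mapping builds a fresh
// ArsenalError, and `is` stays safe even for misspelled names, since the
// Proxy in createIs just returns false.
//
// import errors from './errors';
// const err = errors.NoSuchKey.customizeDescription('no such key: foo');
// err.is.NoSuchKey;     // true
// err.is.InternalError; // false
// err.code;             // HTTP status carried by the error (404 here)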

@ -1,20 +0,0 @@
# Get Pensieve Credentials Executable

## To make executable file from getPensieveCreds.js

`npm install -g pkg`
`pkg getPensieveCreds.js`

This will build mac, linux, and windows binaries.
If you just want linux, for example:
`pkg getPensieveCreds.js --targets node6-linux-x64`

For further options, see https://github.com/zeit/pkg

## To run the executable file

Call the output executable file with an argument that names the
service you are trying to get credentials for (e.g., clueso):

`./getPensieveCreds-linux serviceName`

@ -1,45 +0,0 @@
const async = require('async');
const MetadataFileClient =
    require('../../storage/metadata/file/MetadataFileClient');
const mdClient = new MetadataFileClient({
    host: 's3-metadata',
    port: '9993',
});
const { loadOverlayVersion, parseServiceCredentials } = require('./utils');

const serviceName = process.argv[2];
if (serviceName === undefined) {
    throw new Error('Missing service name (e.g., clueso)');
}
const tokenKey = 'auth/zenko/remote-management-token';

const mdDb = mdClient.openDB(error => {
    if (error) {
        throw error;
    }

    const db = mdDb.openSub('PENSIEVE');
    return async.waterfall([
        cb => db.get('configuration/overlay-version', {}, cb),
        (version, cb) => loadOverlayVersion(db, version, cb),
        (conf, cb) => db.get(tokenKey, {}, (err, instanceAuth) => {
            if (err) {
                return cb(err);
            }
            const creds = parseServiceCredentials(conf, instanceAuth,
                serviceName);
            return cb(null, creds);
        }),
    ], (err, creds) => {
        db.disconnect();
        if (err) {
            throw err;
        }
        if (!creds) {
            throw new Error('No credentials found');
        }
        process.stdout.write(`export AWS_ACCESS_KEY_ID="${creds.accessKey}"\n`);
        process.stdout
            .write(`export AWS_SECRET_ACCESS_KEY="${creds.secretKey}"`);
    });
});

@ -1,14 +0,0 @@
{
    "name": "pensievecreds",
    "version": "1.0.0",
    "description": "Executable tool for Pensieve",
    "main": "getPensieveCreds.js",
    "scripts": {
        "test": "mocha --recursive --timeout 5500 tests/unit"
    },
    "dependencies": {
        "mocha": "5.2.0",
        "async": "~2.6.1",
        "node-forge": "^0.7.1"
    }
}

@ -1,7 +0,0 @@
{
    "privateKey": "-----BEGIN RSA PRIVATE KEY-----\r\nMIIEowIBAAKCAQEAj13sSYE40lAX2qpBvfdGfcSVNtBf8i5FH+E8FAhORwwPu+2S\r\n3yBQbgwHq30WWxunGb1NmZL1wkVZ+vf12DtxqFRnMA08LfO4oO6oC4V8XfKeuHyJ\r\n1qlaKRINz6r9yDkTHtwWoBnlAINurlcNKgGD5p7D+G26Chbr/Oo0ZwHula9DxXy6\r\neH8/bJ5/BynyNyyWRPoAO+UkUdY5utkFCUq2dbBIhovMgjjikf5p2oWqnRKXc+JK\r\nBegr6lSHkkhyqNhTmd8+wA+8Cace4sy1ajY1t5V4wfRZea5vwl/HlyyKodvHdxng\r\nJgg6H61JMYPkplY6Gr9OryBKEAgq02zYoYTDfwIDAQABAoIBAAuDYGlavkRteCzw\r\nRU1LIVcSRWVcgIgDXTu9K8T0Ec0008Kkxomyn6LmxmroJbZ1VwsDH8s4eRH73ckA\r\nxrZxt6Pr+0lplq6eBvKtl8MtGhq1VDe+kJczjHEF6SQHOFAu/TEaPZrn2XMcGvRX\r\nO1BnRL9tepFlxm3u/06VRFYNWqqchM+tFyzLu2AuiuKd5+slSX7KZvVgdkY1ErKH\r\ngB75lPyhPb77C/6ptqUisVMSO4JhLhsD0+ekDVY982Sb7KkI+szdWSbtMx9Ek2Wo\r\ntXwJz7I8T7IbODy9aW9G+ydyhMDFmaEYIaDVFKJj5+fluNza3oQ5PtFNVE50GQJA\r\nsisGqfECgYEAwpkwt0KpSamSEH6qknNYPOwxgEuXWoFVzibko7is2tFPvY+YJowb\r\n68MqHIYhf7gHLq2dc5Jg1TTbGqLECjVxp4xLU4c95KBy1J9CPAcuH4xQLDXmeLzP\r\nJ2YgznRocbzAMCDAwafCr3uY9FM7oGDHAi5bE5W11xWx+9MlFExL3JkCgYEAvJp5\r\nf+JGN1W037bQe2QLYUWGszewZsvplnNOeytGQa57w4YdF42lPhMz6Kc/zdzKZpN9\r\njrshiIDhAD5NCno6dwqafBAW9WZl0sn7EnlLhD4Lwm8E9bRHnC9H82yFuqmNrzww\r\nzxBCQogJISwHiVz4EkU48B283ecBn0wT/fAa19cCgYEApKWsnEHgrhy1IxOpCoRh\r\nUhqdv2k1xDPN/8DUjtnAFtwmVcLa/zJopU/Zn4y1ZzSzjwECSTi+iWZRQ/YXXHPf\r\nl92SFjhFW92Niuy8w8FnevXjF6T7PYiy1SkJ9OR1QlZrXc04iiGBDazLu115A7ce\r\nanACS03OLw+CKgl6Q/RR83ECgYBCUngDVoimkMcIHHt3yJiP3ikeAKlRnMdJlsa0\r\nXWVZV4hCG3lDfRXsnEgWuimftNKf+6GdfYSvQdLdiQsCcjT5A4uLsQTByv5nf4uA\r\n1ZKOsFrmRrARzxGXhLDikvj7yP//7USkq+0BBGFhfuAvl7fMhPceyPZPehqB7/jf\r\nxX1LBQKBgAn5GgSXzzS0e06ZlP/VrKxreOHa5Z8wOmqqYQ0QTeczAbNNmuITdwwB\r\nNkbRqpVXRIfuj0BQBegAiix8om1W4it0cwz54IXBwQULxJR1StWxj3jo4QtpMQ+z\r\npVPdB1Ilb9zPV1YvDwRfdS1xsobzznAx56ecsXduZjs9mF61db8Q\r\n-----END RSA PRIVATE KEY-----\r\n",
    "publicKey": "-----BEGIN PUBLIC KEY-----\r\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAj13sSYE40lAX2qpBvfdG\r\nfcSVNtBf8i5FH+E8FAhORwwPu+2S3yBQbgwHq30WWxunGb1NmZL1wkVZ+vf12Dtx\r\nqFRnMA08LfO4oO6oC4V8XfKeuHyJ1qlaKRINz6r9yDkTHtwWoBnlAINurlcNKgGD\r\n5p7D+G26Chbr/Oo0ZwHula9DxXy6eH8/bJ5/BynyNyyWRPoAO+UkUdY5utkFCUq2\r\ndbBIhovMgjjikf5p2oWqnRKXc+JKBegr6lSHkkhyqNhTmd8+wA+8Cace4sy1ajY1\r\nt5V4wfRZea5vwl/HlyyKodvHdxngJgg6H61JMYPkplY6Gr9OryBKEAgq02zYoYTD\r\nfwIDAQAB\r\n-----END PUBLIC KEY-----\r\n",
    "accessKey": "QXP3VDG3SALNBX2QBJ1C",
    "secretKey": "K5FyqZo5uFKfw9QBtn95o6vuPuD0zH/1seIrqPKqGnz8AxALNSx6EeRq7G1I6JJpS1XN13EhnwGn2ipsml3Uf2fQ00YgEmImG8wzGVZm8fWotpVO4ilN4JGyQCah81rNX4wZ9xHqDD7qYR5MyIERxR/osoXfctOwY7GGUjRKJfLOguNUlpaovejg6mZfTvYAiDF+PTO1sKUYqHt1IfKQtsK3dov1EFMBB5pWM7sVfncq/CthKN5M+VHx9Y87qdoP3+7AW+RCBbSDOfQgxvqtS7PIAf10mDl8k2kEURLz+RqChu4O4S0UzbEmtja7wa7WYhYKv/tM/QeW7kyNJMmnPg==",
    "decryptedSecretKey": "n7PSZ3U6SgerF9PCNhXYsq3S3fRKVGdZTicGV8Ur"
}

@ -1,39 +0,0 @@
const assert = require('assert');
const { parseServiceCredentials, decryptSecret } =
    require('../../utils');
const { privateKey, accessKey, secretKey, decryptedSecretKey }
    = require('../resources.json');

describe('decryptSecret', () => {
    it('should decrypt a secret', () => {
        const instanceCredentials = {
            privateKey,
        };
        const result = decryptSecret(instanceCredentials, secretKey);
        assert.strictEqual(result, decryptedSecretKey);
    });
});

describe('parseServiceCredentials', () => {
    const conf = {
        users: [{ accessKey,
            accountType: 'service-clueso',
            secretKey,
            userName: 'Search Service Account' }],
    };
    const auth = JSON.stringify({ privateKey });

    it('should parse service credentials', () => {
        const result = parseServiceCredentials(conf, auth, 'clueso');
        const expectedResult = {
            accessKey,
            secretKey: decryptedSecretKey,
        };
        assert.deepStrictEqual(result, expectedResult);
    });

    it('should return undefined if no such service', () => {
        const result = parseServiceCredentials(conf, auth, undefined);
        assert.strictEqual(result, undefined);
    });
});

@ -1,38 +0,0 @@
const forge = require('node-forge');

function decryptSecret(instanceCredentials, secret) {
    const privateKey = forge.pki.privateKeyFromPem(
        instanceCredentials.privateKey);
    const encryptedSecretKey = forge.util.decode64(secret);
    return privateKey.decrypt(encryptedSecretKey, 'RSA-OAEP', {
        md: forge.md.sha256.create(),
    });
}

function loadOverlayVersion(db, version, cb) {
    db.get(`configuration/overlay/${version}`, {}, (err, val) => {
        if (err) {
            return cb(err);
        }
        return cb(null, JSON.parse(val));
    });
}

function parseServiceCredentials(conf, auth, serviceName) {
    const instanceAuth = JSON.parse(auth);
    const serviceAccount = (conf.users || []).find(
        u => u.accountType === `service-${serviceName}`);
    if (!serviceAccount) {
        return undefined;
    }
    return {
        accessKey: serviceAccount.accessKey,
        secretKey: decryptSecret(instanceAuth, serviceAccount.secretKey),
    };
}

module.exports = {
    decryptSecret,
    loadOverlayVersion,
    parseServiceCredentials,
};

@ -1,28 +0,0 @@
export const ciphers = [
    'DHE-RSA-AES128-GCM-SHA256',
    'ECDHE-ECDSA-AES128-GCM-SHA256',
    'ECDHE-RSA-AES256-GCM-SHA384',
    'ECDHE-ECDSA-AES256-GCM-SHA384',
    'DHE-RSA-AES128-GCM-SHA256',
    'ECDHE-RSA-AES128-SHA256',
    'DHE-RSA-AES128-SHA256',
    'ECDHE-RSA-AES256-SHA384',
    'DHE-RSA-AES256-SHA384',
    'ECDHE-RSA-AES256-SHA256',
    'DHE-RSA-AES256-SHA256',
    'HIGH',
    '!aNULL',
    '!eNULL',
    '!EXPORT',
    '!DES',
    '!RC4',
    '!MD5',
    '!SHA1',
    '!PSK',
    '!aECDH',
    '!SRP',
    '!IDEA',
    '!EDH-DSS-DES-CBC3-SHA',
    '!EDH-RSA-DES-CBC3-SHA',
    '!KRB5-DES-CBC3-SHA',
].join(':');
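
// Illustrative sketch (not part of the original file): the joined string is in
// the OpenSSL cipher-list format that node.js TLS options accept. The PEM
// variables below are hypothetical.
//
// import * as https from 'https';
//
// const server = https.createServer({
//     key: serverKeyPem,   // hypothetical PEM-encoded private key
//     cert: serverCertPem, // hypothetical PEM-encoded certificate
//     ciphers,
//     honorCipherOrder: true,
// }, (req, res) => res.end('ok'));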