Compare commits

13 Commits: 8829793597 ... 3dba9b6c4f

Author | SHA1 | Date
---|---|---
Taylor McKinnon | 3dba9b6c4f |
Taylor McKinnon | 87f10044d7 |
Taylor McKinnon | ca5ecfb04b |
bert-e | 6ea2e75b23 |
Taylor McKinnon | 5bd1b51fb0 |
Taylor McKinnon | d9f321ef8e |
Taylor McKinnon | ac8010e27e |
Taylor McKinnon | 5e93388c79 |
Taylor McKinnon | aa0b12b021 |
Taylor McKinnon | 7785b9c9bf |
Taylor McKinnon | 4997d9e8ee |
Taylor McKinnon | 60d70b3c10 |
 | eca8b81a28 |
@@ -0,0 +1,276 @@
#! /usr/bin/env node

// TODO
// - deduplicate with Vault's seed script at https://github.com/scality/Vault/pull/1627
// - add permission boundaries to user when https://scality.atlassian.net/browse/VAULT-4 is implemented

const { errors } = require('arsenal');
const program = require('commander');
const werelogs = require('werelogs');
const async = require('async');
const { IAM } = require('aws-sdk');
const { version } = require('../package.json');

const systemPrefix = '/scality-internal/';

function generateUserPolicyDocument() {
    return {
        Version: '2012-10-17',
        Statement: {
            Effect: 'Allow',
            Action: 'utapi:ListMetrics',
            Resource: 'arn:scality:utapi:::*/*',
        },
    };
}

function createIAMClient(opts) {
    return new IAM({
        endpoint: opts.iamEndpoint,
    });
}

function needsCreation(v) {
    if (Array.isArray(v)) {
        return !v.length;
    }

    return !v;
}

class BaseHandler {
    constructor(serviceName, iamClient, log) {
        this.serviceName = serviceName;
        this.iamClient = iamClient;
        this.log = log;
    }

    applyWaterfall(values, done) {
        this.log.debug('applyWaterfall', { values, type: this.resourceType });

        const v = values[this.resourceType];

        if (needsCreation(v)) {
            this.log.debug('creating', { v, type: this.resourceType });
            return this.create(values)
                .then(res =>
                    done(null, Object.assign(values, {
                        [this.resourceType]: res,
                    })))
                .catch(done);
        }

        this.log.debug('conflicts check', { v, type: this.resourceType });
        if (this.conflicts(v)) {
            return done(errors.EntityAlreadyExists.customizeDescription(
                `${this.resourceType} ${this.serviceName} already exists and conflicts with the expected value.`));
        }

        this.log.debug('nothing to do', { v, type: this.resourceType });
        return done(null, values);
    }
}

class UserHandler extends BaseHandler {
    get resourceType() {
        return 'user';
    }

    collect() {
        return this.iamClient.getUser({
            UserName: this.serviceName,
        })
            .promise()
            .then(res => res.User);
    }

    create(allResources) {
        return this.iamClient.createUser({
            UserName: this.serviceName,
            Path: systemPrefix,
        })
            .promise()
            .then(res => res.User);
    }

    conflicts(u) {
        return u.Path !== systemPrefix;
    }
}

class PolicyHandler extends BaseHandler {
    get resourceType() {
        return 'policy';
    }

    collect() {
        return this.iamClient.listPolicies({
            MaxItems: 100,
            OnlyAttached: false,
            Scope: 'All',
        })
            .promise()
            .then(res => res.Policies.find(p => p.PolicyName === this.serviceName));
    }

    create(allResources) {
        const doc = generateUserPolicyDocument();

        return this.iamClient.createPolicy({
            PolicyName: this.serviceName,
            PolicyDocument: JSON.stringify(doc),
            Path: systemPrefix,
        })
            .promise()
            .then(res => res.Policy);
    }

    conflicts(p) {
        return p.Path !== systemPrefix;
    }
}

class PolicyAttachmentHandler extends BaseHandler {
    get resourceType() {
        return 'policyAttachment';
    }

    collect() {
        return this.iamClient.listAttachedUserPolicies({
            UserName: this.serviceName,
            MaxItems: 100,
        })
            .promise()
            .then(res => res.AttachedPolicies);
    }

    create(allResources) {
        return this.iamClient.attachUserPolicy({
            PolicyArn: allResources.policy.Arn,
            UserName: this.serviceName,
        })
            .promise();
    }

    conflicts(p) {
        return false;
    }
}

class AccessKeyHandler extends BaseHandler {
    get resourceType() {
        return 'accessKey';
    }

    collect() {
        return this.iamClient.listAccessKeys({
            UserName: this.serviceName,
            MaxItems: 100,
        })
            .promise()
            .then(res => res.AccessKeyMetadata);
    }

    create(allResources) {
        return this.iamClient.createAccessKey({
            UserName: this.serviceName,
        })
            .promise()
            .then(res => res.AccessKey);
    }

    conflicts(a) {
        return false;
    }
}

function collectResource(v, done) {
    v.collect()
        .then(res => done(null, res))
        .catch(err => {
            if (err.code === 'NoSuchEntity') {
                return done(null, null);
            }

            done(err);
        });
}

function collectResourcesFromHandlers(handlers, cb) {
    const tasks = handlers.reduce((acc, v) => ({
        [v.resourceType]: done => collectResource(v, done),
        ...acc,
    }), {});
    async.parallel(tasks, cb);
}

function buildServiceUserHandlers(serviceName, client, log) {
    return [
        UserHandler,
        PolicyHandler,
        PolicyAttachmentHandler,
        AccessKeyHandler,
    ].map(h => new h(serviceName, client, log));
}

function apply(client, serviceName, log, cb) {
    const handlers = buildServiceUserHandlers(serviceName, client, log);

    async.waterfall([
        done => collectResourcesFromHandlers(handlers, done),
        ...handlers.map(h => h.applyWaterfall.bind(h)),
        (values, done) => done(null, values.accessKey),
    ], cb);
}

function wrapAction(actionFunc, serviceName, options) {
    werelogs.configure({
        level: options.logLevel,
        dump: options.logDumpLevel,
    });

    const log = new werelogs.Logger(process.argv[1]).newRequestLogger();
    const client = createIAMClient(options);

    actionFunc(client, serviceName, log, (err, data) => {
        if (err) {
            log.error('failed', {
                data,
                error: err,
            });
            if (err.EntityAlreadyExists) {
                log.error(`run "${process.argv[1]} purge ${serviceName}" to fix.`);
            }
            process.exit(1);
        }
        log.info('success', { data });
        process.exit();
    });
}

program.version(version);

[
    {
        name: 'apply <service-name>',
        actionFunc: apply,
    },
].forEach(cmd => {
    program
        .command(cmd.name)
        .option('--iam-endpoint <url>', 'IAM endpoint', 'http://localhost:8600')
        .option('--log-level <level>', 'log level', 'info')
        .option('--log-dump-level <level>', 'log level that triggers a dump of the debug buffer', 'error')
        .action(wrapAction.bind(null, cmd.actionFunc));
});

const validCommands = program.commands.map(n => n._name);

// Is the command given invalid or are there too few arguments passed
if (!validCommands.includes(process.argv[2])) {
    program.outputHelp();
    process.stdout.write('\n');
    process.exit(1);
} else {
    program.parse(process.argv);
}

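For reference, the seed script above exposes a single `apply <service-name>` command through commander, with `--iam-endpoint` defaulting to http://localhost:8600 and `--log-level` defaulting to info. A typical invocation might look like the line below; the script path and service name shown are illustrative assumptions, not values taken from this change:

    node ensureServiceUser.js apply service-utapi-user --iam-endpoint http://localhost:8600

On success the final waterfall step passes the access key resource to the callback, the script logs a `success` entry and exits 0; rerunning it is intended to be idempotent unless an existing user or policy conflicts with the expected `/scality-internal/` path.
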
@@ -0,0 +1,47 @@
version: '3.8'

x-models:
  warp10: &warp10
    build:
      context: .
      dockerfile: ./images/warp10/Dockerfile
    volumes: [ $PWD/warpscript:/usr/local/share/warpscript ]

  warp10_env: &warp10_env
    ENABLE_WARPSTUDIO: 'true'
    ENABLE_SENSISION: 'true'
    warpscript.repository.refresh: 1000
    warpscript.maxops: 1000000000
    warpscript.maxops.hard: 1000000000
    warpscript.maxfetch: 1000000000
    warpscript.maxfetch.hard: 1000000000
    warpscript.extension.debug: io.warp10.script.ext.debug.DebugWarpScriptExtension
    warpscript.maxrecursion: 1000
    warpscript.repository.directory: /usr/local/share/warpscript
    warpscript.extension.logEvent: io.warp10.script.ext.logging.LoggingWarpScriptExtension

  redis: &redis
    build:
      context: .
      dockerfile: ./images/redis/Dockerfile

services:
  redis:
    << : *redis
    ports:
      - 6379:6379
      - 9121:9121

  warp10:
    << : *warp10
    environment:
      << : *warp10_env

    ports:
      - 4802:4802
      - 8081:8081
      - 9718:9718

    volumes:
      - /tmp/warp10:/data
      - '${PWD}/warpscript:/usr/local/share/warpscript'

eve/main.yml
@ -22,7 +22,9 @@ models:
|
|||
warp10:
|
||||
context: '.'
|
||||
dockerfile: 'images/warp10/Dockerfile'
|
||||
vault: eve/workers/mocks/vault
|
||||
redis:
|
||||
context: '.'
|
||||
dockerfile: 'images/redis/Dockerfile'
|
||||
- Upload: &upload_artifacts
|
||||
source: /artifacts
|
||||
urls:
|
||||
|
@ -33,6 +35,28 @@ models:
|
|||
command: |
|
||||
git lfs pull
|
||||
haltOnFailure: True
|
||||
- ShellCommand: &wait_for_vault
|
||||
name: Wait for Vault
|
||||
command: |
|
||||
bash -c "
|
||||
set -ex
|
||||
bash tests/utils/wait_for_local_port.bash 8500 60"
|
||||
haltOnFailure: true
|
||||
logfiles:
|
||||
vault:
|
||||
filename: "/artifacts/vault.log"
|
||||
follow: true
|
||||
- ShellCommand: &wait_for_warp10
|
||||
name: Wait for Warp 10
|
||||
command: |
|
||||
bash -c "
|
||||
set -ex
|
||||
bash tests/utils/wait_for_local_port.bash 4802 60"
|
||||
haltOnFailure: true
|
||||
logfiles:
|
||||
warp10:
|
||||
filename: "/artifacts/warp10.log"
|
||||
follow: true
|
||||
|
||||
stages:
|
||||
pre-merge:
|
||||
|
@ -105,6 +129,8 @@ stages:
|
|||
- ShellCommand:
|
||||
name: run cron tests
|
||||
command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash false ft_test:cron
|
||||
env:
|
||||
REINDEX_PYTHON_INTERPRETER: python3
|
||||
logfiles:
|
||||
utapi:
|
||||
filename: "/artifacts/setup_ft_test:cron.log"
|
||||
|
@ -129,21 +155,14 @@ stages:
|
|||
steps:
|
||||
- Git: *clone
|
||||
- ShellCommand: *fetch_lfs
|
||||
- ShellCommand:
|
||||
name: Wait for Warp 10
|
||||
command: |
|
||||
bash -c "
|
||||
set -ex
|
||||
bash tests/utils/wait_for_local_port.bash 4802 60"
|
||||
logfiles:
|
||||
warp10:
|
||||
filename: "/artifacts/warp10.log"
|
||||
follow: true
|
||||
- ShellCommand: *wait_for_vault
|
||||
- ShellCommand: *wait_for_warp10
|
||||
- ShellCommand:
|
||||
name: run v2 functional tests
|
||||
command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash true ft_test:v2
|
||||
env:
|
||||
UTAPI_CACHE_BACKEND: redis
|
||||
UTAPI_SERVICE_USER_ENABLED: 'true'
|
||||
UTAPI_LOG_LEVEL: trace
|
||||
SETUP_CMD: "run start_v2:server"
|
||||
logfiles:
|
||||
|
@ -153,6 +172,9 @@ stages:
|
|||
utapi:
|
||||
filename: "/artifacts/setup_ft_test:v2.log"
|
||||
follow: true
|
||||
vault:
|
||||
filename: "/artifacts/vault.log"
|
||||
follow: true
|
||||
run-v2-limit-tests:
|
||||
worker:
|
||||
<< : *workspace
|
||||
|
@ -161,16 +183,8 @@ stages:
|
|||
steps:
|
||||
- Git: *clone
|
||||
- ShellCommand: *fetch_lfs
|
||||
- ShellCommand:
|
||||
name: Wait for Warp 10
|
||||
command: |
|
||||
bash -c "
|
||||
set -ex
|
||||
bash tests/utils/wait_for_local_port.bash 4802 60"
|
||||
logfiles:
|
||||
warp10:
|
||||
filename: "/artifacts/warp10.log"
|
||||
follow: true
|
||||
- ShellCommand: *wait_for_vault
|
||||
- ShellCommand: *wait_for_warp10
|
||||
- ShellCommand:
|
||||
name: run v2 soft limit tests
|
||||
command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash true ft_test:softLimit
|
||||
|
@ -184,6 +198,10 @@ stages:
|
|||
follow: true
|
||||
utapi:
|
||||
filename: "/artifacts/setup_ft_test:softLimit.log"
|
||||
follow: true
|
||||
vault:
|
||||
filename: "/artifacts/vault.log"
|
||||
follow: true
|
||||
- ShellCommand:
|
||||
name: run v2 hard limit tests
|
||||
command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash true ft_test:hardLimit
|
||||
|
@ -198,4 +216,7 @@ stages:
|
|||
utapi:
|
||||
filename: "/artifacts/setup_ft_test:hardLimit.log"
|
||||
follow: true
|
||||
vault:
|
||||
filename: "/artifacts/vault.log"
|
||||
follow: true
|
||||
- Upload: *upload_artifacts
|
||||
|
|
|
@@ -1,7 +0,0 @@
FROM node:alpine

ADD ./vault.js /usr/share/src/

WORKDIR /usr/share/src/

CMD node vault.js

|
@ -1,66 +0,0 @@
|
|||
const http = require('http');
|
||||
const url = require('url');
|
||||
|
||||
const port = process.env.VAULT_PORT || 8500;
|
||||
|
||||
const unauthResp = {
|
||||
ErrorResponse: {
|
||||
$: {
|
||||
xmlns: 'https://iam.amazonaws.com/doc/2010-05-08/',
|
||||
},
|
||||
Error: {
|
||||
Code: 'InvalidAccessKeyId',
|
||||
Message: 'The AWS access key Id you provided does not exist in our records.',
|
||||
},
|
||||
RequestId: '97f22e2dba45bca2a5cd:fb375c22ed4ea7500691',
|
||||
},
|
||||
};
|
||||
|
||||
|
||||
class Vault {
|
||||
constructor() {
|
||||
this._server = null;
|
||||
}
|
||||
|
||||
static _onRequest(req, res) {
|
||||
const { query } = url.parse(req.url, true);
|
||||
if (query.accessKey === 'invalidKey') {
|
||||
res.writeHead(403);
|
||||
res.write(JSON.stringify(unauthResp));
|
||||
} else if (query.Action === 'AccountsCanonicalIds') {
|
||||
res.writeHead(200);
|
||||
let body;
|
||||
if (Array.isArray(query.accountIds)) {
|
||||
body = query.accountIds.map(id => ({
|
||||
accountId: id,
|
||||
canonicalId: id.split(':')[1],
|
||||
}));
|
||||
} else {
|
||||
body = [{
|
||||
accountId: query.accountIds,
|
||||
canonicalId: query.accountIds.split(':')[1],
|
||||
}];
|
||||
}
|
||||
res.write(JSON.stringify(body));
|
||||
}
|
||||
return res.end();
|
||||
}
|
||||
|
||||
start() {
|
||||
this._server = http.createServer(Vault._onRequest).listen(port);
|
||||
}
|
||||
|
||||
end() {
|
||||
this._server.close();
|
||||
}
|
||||
}
|
||||
|
||||
const vault = new Vault();
|
||||
|
||||
['SIGINT', 'SIGQUIT', 'SIGTERM'].forEach(eventName => {
|
||||
process.on(eventName, () => process.exit(0));
|
||||
});
|
||||
|
||||
// eslint-disable-next-line no-console
|
||||
console.log('Starting Vault Mock...');
|
||||
vault.start();
|
|
@ -15,11 +15,9 @@ spec:
|
|||
cpu: 500m
|
||||
memory: 1Gi
|
||||
limits:
|
||||
cpu: "2"
|
||||
cpu: 1500m
|
||||
memory: 3Gi
|
||||
volumeMounts:
|
||||
- mountPath: /var/run/docker.sock
|
||||
name: docker-socket
|
||||
- name: artifacts
|
||||
readOnly: false
|
||||
mountPath: /artifacts
|
||||
|
@ -38,30 +36,99 @@ spec:
|
|||
value: 't'
|
||||
resources:
|
||||
requests:
|
||||
cpu: 500m
|
||||
cpu: 300m
|
||||
memory: 1Gi
|
||||
limits:
|
||||
cpu: 1750m
|
||||
cpu: 1000m
|
||||
memory: 3Gi
|
||||
volumeMounts:
|
||||
- name: artifacts
|
||||
readOnly: false
|
||||
mountPath: /artifacts
|
||||
{% if vars.vault is defined and vars.vault == 'enabled' %}
|
||||
- name: vault
|
||||
image: "{{ images.vault }}"
|
||||
- name: redis
|
||||
image: "{{ images.redis }}"
|
||||
command:
|
||||
- sh
|
||||
- -ce
|
||||
- /init | tee -a /artifacts/redis.log
|
||||
resources:
|
||||
requests:
|
||||
cpu: 10m
|
||||
memory: 64Mi
|
||||
cpu: 150m
|
||||
memory: 500Mi
|
||||
limits:
|
||||
cpu: 50m
|
||||
memory: 128Mi
|
||||
cpu: 200m
|
||||
memory: 500Mi
|
||||
volumeMounts:
|
||||
- name: artifacts
|
||||
readOnly: false
|
||||
mountPath: /artifacts
|
||||
- name: redis-replica
|
||||
image: "{{ images.redis }}"
|
||||
command:
|
||||
- sh
|
||||
- -ce
|
||||
- redis-server --port 6380 --slaveof localhost 6379 --slave-announce-ip localhost | tee -a /artifacts/redis.log
|
||||
resources:
|
||||
requests:
|
||||
cpu: 150m
|
||||
memory: 500Mi
|
||||
limits:
|
||||
cpu: 150m
|
||||
memory: 500Mi
|
||||
volumeMounts:
|
||||
- name: artifacts
|
||||
readOnly: false
|
||||
mountPath: /artifacts
|
||||
- name: redis-sentinel
|
||||
image: "{{ images.redis }}"
|
||||
command:
|
||||
- sh
|
||||
- -c
|
||||
- |-
|
||||
cat > /tmp/sentinel.conf <<EOF
|
||||
port 16379
|
||||
logfile ""
|
||||
dir /tmp
|
||||
sentinel announce-ip localhost
|
||||
sentinel announce-port 16379
|
||||
sentinel monitor scality-s3 localhost 6379 1
|
||||
EOF
|
||||
redis-sentinel /tmp/sentinel.conf
|
||||
resources:
|
||||
requests:
|
||||
cpu: 150m
|
||||
memory: 500Mi
|
||||
limits:
|
||||
cpu: 150m
|
||||
memory: 500Mi
|
||||
volumeMounts:
|
||||
- name: artifacts
|
||||
readOnly: false
|
||||
mountPath: /artifacts
|
||||
ports:
|
||||
- containerPort: 16379
|
||||
{% if vars.vault is defined and vars.vault == 'enabled' %}
|
||||
- name: vault
|
||||
image: registry.scality.com/vault-dev/vault:c2607856
|
||||
command:
|
||||
- bash
|
||||
- -c
|
||||
- 'chmod 400 tests/utils/keyfile && yarn start | tee -a /artifacts/vault.log'
|
||||
env:
|
||||
- name: VAULT_DB_BACKEND
|
||||
value: LEVELDB
|
||||
resources:
|
||||
requests:
|
||||
cpu: 500m
|
||||
memory: 1Gi
|
||||
limits:
|
||||
cpu: 500m
|
||||
memory: 1Gi
|
||||
volumeMounts:
|
||||
- name: artifacts
|
||||
readOnly: false
|
||||
mountPath: /artifacts
|
||||
{% endif %}
|
||||
volumes:
|
||||
- name: docker-socket
|
||||
hostPath:
|
||||
path: /var/run/docker.sock
|
||||
type: Socket
|
||||
- name: artifacts
|
||||
emptyDir: {}
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
FROM buildpack-deps:jessie-curl
|
||||
FROM buildpack-deps:bullseye-curl
|
||||
|
||||
#
|
||||
# Install apt packages needed by utapi and buildbot_worker
|
||||
|
@ -13,6 +13,10 @@ COPY eve/workers/unit_and_feature_tests/utapi_packages.list eve/workers/unit_and
|
|||
|
||||
WORKDIR /utapi
|
||||
|
||||
#
|
||||
# Install Nodejs, yarn, git-lfs and buildbot deps
|
||||
#
|
||||
|
||||
RUN wget https://nodejs.org/dist/v${NODE_VERSION}/node-v${NODE_VERSION}-linux-x64.tar.gz \
|
||||
&& tar -xf node-v${NODE_VERSION}-linux-x64.tar.gz --directory /usr/local --strip-components 1 \
|
||||
&& apt-get update -qq \
|
||||
|
@ -20,10 +24,11 @@ RUN wget https://nodejs.org/dist/v${NODE_VERSION}/node-v${NODE_VERSION}-linux-x6
|
|||
&& curl -sS http://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
|
||||
&& echo "deb http://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list \
|
||||
&& curl -sSL https://packagecloud.io/github/git-lfs/gpgkey | apt-key add - \
|
||||
&& echo "deb https://packagecloud.io/github/git-lfs/debian/ jessie main" | tee /etc/apt/sources.list.d/lfs.list \
|
||||
&& echo "deb https://packagecloud.io/github/git-lfs/debian/ bullseye main" | tee /etc/apt/sources.list.d/lfs.list \
|
||||
&& curl -sSL https://download.docker.com/linux/debian/gpg | apt-key add - \
|
||||
&& echo "deb [arch=amd64] https://download.docker.com/linux/debian bullseye stable" > /etc/apt/sources.list.d/docker.list \
|
||||
&& apt-get update -qq \
|
||||
&& cat /tmp/*packages.list | xargs apt-get install -y \
|
||||
&& pip install pip==9.0.1 \
|
||||
&& cat /tmp/*packages.list | xargs apt-get install -y --no-install-recommends\
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& rm -f /tmp/*packages.list \
|
||||
&& rm -f /etc/supervisor/conf.d/*.conf \
|
||||
|
@ -38,15 +43,18 @@ COPY package.json yarn.lock /utapi/
|
|||
RUN yarn cache clean \
|
||||
&& yarn install --frozen-lockfile \
|
||||
&& yarn cache clean
|
||||
#
|
||||
# Run buildbot-worker on startup through supervisor
|
||||
#
|
||||
ARG BUILDBOT_VERSION
|
||||
|
||||
RUN pip install buildbot-worker==$BUILDBOT_VERSION
|
||||
RUN pip3 install requests
|
||||
RUN pip3 install redis==3.5.3
|
||||
#
|
||||
# Install buildbot
|
||||
#
|
||||
|
||||
ARG BUILDBOT_VERSION=2.7.0
|
||||
RUN pip3 install buildbot-worker==$BUILDBOT_VERSION requests redis==3.5.3
|
||||
ADD eve/workers/unit_and_feature_tests/supervisor/buildbot_worker.conf /etc/supervisor/conf.d/
|
||||
ADD eve/workers/unit_and_feature_tests/redis/sentinel.conf /etc/sentinel.conf
|
||||
|
||||
#
|
||||
# Run buildbot-worker on startup through supervisor
|
||||
#
|
||||
|
||||
CMD ["supervisord", "-n"]
|
||||
|
|
|
@@ -3,9 +3,6 @@ git
git-lfs
libffi-dev
libssl-dev
python2.7
python2.7-dev
python-pip
sudo
supervisor
lsof

@@ -3,3 +3,6 @@ redis-server
python3
python3-pip
yarn
docker-ce-cli
jq
moreutils

@@ -0,0 +1,17 @@
FROM redis:alpine

ENV S6_VERSION 2.0.0.1
ENV EXPORTER_VERSION 1.24.0
ENV S6_BEHAVIOUR_IF_STAGE2_FAILS 2

RUN wget https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}/s6-overlay-amd64.tar.gz -O /tmp/s6-overlay-amd64.tar.gz \
    && tar xzf /tmp/s6-overlay-amd64.tar.gz -C / \
    && rm -rf /tmp/s6-overlay-amd64.tar.gz

RUN wget https://github.com/oliver006/redis_exporter/releases/download/v${EXPORTER_VERSION}/redis_exporter-v${EXPORTER_VERSION}.linux-amd64.tar.gz -O redis_exporter.tar.gz \
    && tar xzf redis_exporter.tar.gz -C / \
    && cd .. \
    && mv /redis_exporter-v${EXPORTER_VERSION}.linux-amd64/redis_exporter /usr/local/bin/redis_exporter

ADD ./images/redis/s6 /etc
CMD /init

@@ -0,0 +1,4 @@
#!/usr/bin/with-contenv sh
echo "starting redis exporter"
exec redis_exporter

@@ -0,0 +1,4 @@
#!/usr/bin/with-contenv sh
echo "starting redis"
exec redis-server

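The redis image added above is the one the new docker-compose.yml earlier in this change builds (build context `.`, dockerfile `./images/redis/Dockerfile`). As a rough sketch it could also be built by hand from the repository root; the image tag below is an assumption for illustration only:

    docker build -f images/redis/Dockerfile -t utapi-dev-redis .
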
||||
|
|
@@ -24,6 +24,7 @@ ENV S6_BEHAVIOUR_IF_STAGE2_FAILS 2

ENV WARP10_CONF_TEMPLATES ${WARP10_HOME}/conf.templates/standalone
ENV SENSISION_DATA_DIR /data/sensision
ENV SENSISION_PORT 8082

# Modify Warp 10 default config
ENV standalone.host 0.0.0.0

@@ -46,7 +47,12 @@ ADD ./images/warp10/jmx_prom.yaml /opt/jmx_prom.yaml
# Install protobuf extension
ADD ./images/warp10/warp10-ext-protobuf-1.2.2-uberjar.jar /opt/warp10/lib/

# Install Sensision exporter
COPY --from=builder /usr/local/go/warp10_sensision_exporter /usr/local/bin/warp10_sensision_exporter

ADD ./images/warp10/s6 /etc
ADD ./warpscript /usr/local/share/warpscript
ADD ./images/warp10/static.tokens /

CMD /init

||||
|
||||
|
|
|
@@ -3,7 +3,7 @@
JAVA="/usr/bin/java"
JAVA_OPTS=""

VERSION=1.0.21
VERSION=1.0.23
SENSISION_CONFIG=${SENSISION_DATA_DIR}/conf/sensision.conf
SENSISION_JAR=${SENSISION_HOME}/bin/sensision-${VERSION}.jar
SENSISION_CP=${SENSISION_HOME}/etc:${SENSISION_JAR}

@@ -14,7 +14,7 @@ if [ -z "$SENSISION_HEAP" ]; then
    SENSISION_HEAP=64m
fi

SENSISION_CMD="${JAVA} ${JAVA_OPTS} -Xmx${SENSISION_HEAP} -Dsensision.server.port=0 ${SENSISION_OPTS} -Dsensision.config=${SENSISION_CONFIG} -cp ${SENSISION_CP} ${SENSISION_CLASS}"
SENSISION_CMD="${JAVA} ${JAVA_OPTS} -Xmx${SENSISION_HEAP} -Dsensision.server.port=${SENSISION_PORT} ${SENSISION_OPTS} -Dsensision.config=${SENSISION_CONFIG} -cp ${SENSISION_CP} ${SENSISION_CLASS}"

if [ -n "$ENABLE_SENSISION" ]; then
    echo "Starting Sensision with $SENSISION_CMD ..."

||||
|
|
|
@@ -12,6 +12,9 @@ const RedisClient = require('../libV2/redis');
const REINDEX_SCHEDULE = '0 0 * * Sun';
const REINDEX_LOCK_KEY = 's3:utapireindex:lock';
const REINDEX_LOCK_TTL = (60 * 60) * 24;
const REINDEX_PYTHON_INTERPRETER = process.env.REINDEX_PYTHON_INTERPRETER !== undefined
    ? process.env.REINDEX_PYTHON_INTERPRETER
    : 'python3.4';

class UtapiReindex {
    constructor(config) {

@@ -109,7 +112,7 @@ class UtapiReindex {
        const flags = this._buildFlags();
        this._requestLogger.debug(`launching subprocess ${path} `
            + `with flags: ${flags}`);
        const process = childProcess.spawn('python3.4', [
        const process = childProcess.spawn(REINDEX_PYTHON_INTERPRETER, [
            path,
            ...flags,
        ]);

||||
|
|
|
@@ -16,7 +16,9 @@
    "warp10": {
        "host": "127.0.0.1",
        "port": 4802,
        "nodeId": "single_node"
        "nodeId": "single_node",
        "requestTimeout": 60000,
        "connectTimeout": 60000
    },
    "healthChecks": {
        "allowFrom": ["127.0.0.1/8", "::1"]

@@ -45,6 +47,10 @@
        "retentionDays": 45,
        "expirationEnabled": false
    },
    "serviceUser": {
        "arn": "arn:aws:iam::000000000000:user/service-utapi-user",
        "enabled": false
    },
    "filter": {
        "allow": {},
        "deny": {}

||||
|
|
|
@@ -289,6 +289,8 @@ class Config {
        const warp10Conf = {
            readToken: _loadFromEnv('WARP10_READ_TOKEN', config.warp10.readToken),
            writeToken: _loadFromEnv('WARP10_WRITE_TOKEN', config.warp10.writeToken),
            requestTimeout: _loadFromEnv('WARP10_REQUEST_TIMEOUT', config.warp10.requestTimeout, _typeCasts.int),
            connectTimeout: _loadFromEnv('WARP10_CONNECT_TIMEOUT', config.warp10.connectTimeout, _typeCasts.int),
        };

        if (Array.isArray(config.warp10.hosts) || _definedInEnv('WARP10_HOSTS')) {

@@ -366,6 +368,11 @@ class Config {

        parsedConfig.bucketd = _loadFromEnv('BUCKETD_BOOTSTRAP', config.bucketd, _typeCasts.serverList);

        parsedConfig.serviceUser = {
            arn: _loadFromEnv('SERVICE_USER_ARN', config.serviceUser.arn),
            enabled: _loadFromEnv('SERVICE_USER_ENABLED', config.serviceUser.enabled, _typeCasts.bool),
        };

        parsedConfig.filter = Config._parseResourceFilters(config.filter);

        return parsedConfig;

||||
|
|
|
@@ -100,6 +100,10 @@ const schema = Joi.object({
        expirationEnabled: Joi.boolean(),
        hardLimit: Joi.string(),
    }),
    serviceUser: Joi.object({
        arn: Joi.string(),
        enabled: Joi.boolean(),
    }),
    filter: Joi.object(allowedFilterStates.reduce(
        (filterObj, state) => {
            filterObj[state] = allowedFilterFields.reduce(

||||
|
|
|
@@ -10,7 +10,7 @@ const moduleLogger = new LoggerContext({

const params = {
    bucketdBootstrap: config.bucketd,
    https: config.https,
    https: config.tls,
};

module.exports = new BucketClientInterface(params, bucketclient, moduleLogger);

||||
|
|
|
@ -1,9 +1,13 @@
|
|||
/* eslint-disable no-restricted-syntax */
|
||||
const { usersBucket, splitter: mdKeySplitter, mpuBucketPrefix } = require('arsenal').constants;
|
||||
const arsenal = require('arsenal');
|
||||
|
||||
const metadata = require('./client');
|
||||
const { LoggerContext, logger } = require('../utils');
|
||||
const { keyVersionSplitter } = require('../constants');
|
||||
|
||||
const { usersBucket, splitter: mdKeySplitter, mpuBucketPrefix } = arsenal.constants;
|
||||
const { BucketInfo } = arsenal.models;
|
||||
|
||||
const moduleLogger = new LoggerContext({
|
||||
module: 'metadata.client',
|
||||
});
|
||||
|
@ -108,9 +112,25 @@ function bucketExists(bucket) {
|
|||
));
|
||||
}
|
||||
|
||||
function getBucket(bucket) {
|
||||
return new Promise((resolve, reject) => {
|
||||
metadata.getBucketAttributes(
|
||||
bucket,
|
||||
logger.newRequestLogger(), (err, data) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
resolve(BucketInfo.fromObj(data));
|
||||
},
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
listBuckets,
|
||||
listObjects,
|
||||
listMPUs,
|
||||
bucketExists,
|
||||
getBucket,
|
||||
};
|
||||
|
|
|
@@ -0,0 +1,14 @@
const { collectDefaultMetrics, register } = require('prom-client');

collectDefaultMetrics({
    timeout: 10000,
    gcDurationBuckets: [0.001, 0.01, 0.1, 1, 2, 5],
});

async function prometheusMetrics(ctx) {
    // eslint-disable-next-line no-param-reassign
    ctx.results.statusCode = 200;
    ctx.results.body = await register.metrics();
}

module.exports = prometheusMetrics;

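The handler above backs the new `/_/metrics` route declared in openapi.yaml further down in this change, returning the default Node.js process metrics collected by prom-client as a text/plain body. Assuming a locally running Utapi server (host and port below are placeholders, not values taken from this change), a scrape can be checked by hand with:

    curl http://<utapi-host>:<utapi-port>/_/metrics
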
|
@ -28,47 +28,62 @@ async function listMetric(ctx, params) {
|
|||
end = Date.now();
|
||||
}
|
||||
|
||||
// A separate request will be made to warp 10 per requested resource
|
||||
const results = await Promise.all(
|
||||
resources.map(async ({ resource, id }) => {
|
||||
const labels = { [labelName]: id };
|
||||
let results;
|
||||
|
||||
const res = await iterIfError(warp10Clients, warp10 => {
|
||||
const options = {
|
||||
params: {
|
||||
start: convertTimestamp(start).toString(),
|
||||
end: convertTimestamp(end).toString(),
|
||||
try {
|
||||
// A separate request will be made to warp 10 per requested resource
|
||||
results = await Promise.all(
|
||||
resources.map(async ({ resource, id }) => {
|
||||
const labels = { [labelName]: id };
|
||||
|
||||
const res = await iterIfError(warp10Clients, warp10 => {
|
||||
const options = {
|
||||
params: {
|
||||
start: convertTimestamp(start).toString(),
|
||||
end: convertTimestamp(end).toString(),
|
||||
labels,
|
||||
node: warp10.nodeId,
|
||||
},
|
||||
macro: 'utapi/getMetrics',
|
||||
};
|
||||
return warp10.exec(options);
|
||||
}, error => ctx.logger.error('error during warp 10 request', {
|
||||
error,
|
||||
requestParams: {
|
||||
start,
|
||||
end,
|
||||
labels,
|
||||
node: warp10.nodeId,
|
||||
},
|
||||
macro: 'utapi/getMetrics',
|
||||
}));
|
||||
|
||||
if (res.result.length === 0) {
|
||||
ctx.logger.error('unable to retrieve metrics', { resource, type: params.level });
|
||||
throw errors.InternalError;
|
||||
}
|
||||
|
||||
const rawMetrics = JSON.parse(res.result[0]);
|
||||
|
||||
// Due to various error cases it is possible for metrics in utapi to go negative.
|
||||
// As this is nonsensical to the user we replace any negative values with zero.
|
||||
const metrics = {
|
||||
storageUtilized: rawMetrics.storageUtilized.map(positiveOrZero),
|
||||
numberOfObjects: rawMetrics.numberOfObjects.map(positiveOrZero),
|
||||
incomingBytes: positiveOrZero(rawMetrics.incomingBytes),
|
||||
outgoingBytes: positiveOrZero(rawMetrics.outgoingBytes),
|
||||
operations: rawMetrics.operations,
|
||||
};
|
||||
return warp10.exec(options);
|
||||
});
|
||||
|
||||
if (res.result.length === 0) {
|
||||
ctx.logger.error('unable to retrieve metrics', { resource, type: params.level });
|
||||
throw errors.InternalError;
|
||||
}
|
||||
return {
|
||||
resource,
|
||||
metrics,
|
||||
};
|
||||
}),
|
||||
);
|
||||
} catch (error) {
|
||||
ctx.logger.error('error fetching metrics from warp10', { error });
|
||||
throw errors.InternalError;
|
||||
}
|
||||
|
||||
const rawMetrics = JSON.parse(res.result[0]);
|
||||
|
||||
// Due to various error cases it is possible for metrics in utapi to go negative.
|
||||
// As this is nonsensical to the user we replace any negative values with zero.
|
||||
const metrics = {
|
||||
storageUtilized: rawMetrics.storageUtilized.map(positiveOrZero),
|
||||
numberOfObjects: rawMetrics.numberOfObjects.map(positiveOrZero),
|
||||
incomingBytes: positiveOrZero(rawMetrics.incomingBytes),
|
||||
outgoingBytes: positiveOrZero(rawMetrics.outgoingBytes),
|
||||
operations: rawMetrics.operations,
|
||||
};
|
||||
|
||||
return {
|
||||
resource,
|
||||
metrics,
|
||||
};
|
||||
}),
|
||||
);
|
||||
|
||||
// Convert the results from warp10 into the expected response format
|
||||
const resp = results
|
||||
|
|
|
@@ -118,6 +118,10 @@ async function authV4Middleware(request, response, params) {
        [passed, authorizedResources] = await translateAndAuthorize(request, action, params.level, requestedResources);
    } catch (error) {
        request.logger.error('error during authentication', { error });
        // rethrow any access denied errors
        if (error.AccessDenied) {
            throw error;
        }
        throw errors.InternalError;
    }

||||
|
||||
|
|
|
@@ -51,6 +51,13 @@ class ReindexTask extends BaseTask {
                // eslint-disable-next-line no-continue
                continue;
            }

            if (!Number.isInteger(obj.value['content-length'])) {
                logger.debug('object missing content-length, not including in count');
                // eslint-disable-next-line no-continue
                continue;
            }

            count += 1;
            size += obj.value['content-length'];

||||
|
||||
|
|
libV2/vault.js
@ -1,179 +0,0 @@
|
|||
const assert = require('assert');
|
||||
const { auth, policies } = require('arsenal');
|
||||
const vaultclient = require('vaultclient');
|
||||
const config = require('./config');
|
||||
const errors = require('./errors');
|
||||
|
||||
/**
|
||||
@class Vault
|
||||
* Creates a vault instance for authentication and authorization
|
||||
*/
|
||||
|
||||
class Vault {
|
||||
constructor(options) {
|
||||
const { host, port } = options.vaultd;
|
||||
if (options.tls) {
|
||||
const { key, cert, ca } = options.tls;
|
||||
this._client = new vaultclient.Client(host, port, true, key, cert,
|
||||
ca);
|
||||
} else {
|
||||
this._client = new vaultclient.Client(host, port);
|
||||
}
|
||||
}
|
||||
|
||||
/** authenticateV4Request
|
||||
*
|
||||
* @param {object} params - the authentication parameters as returned by
|
||||
* auth.extractParams
|
||||
* @param {number} params.version - shall equal 4
|
||||
* @param {string} params.data.accessKey - the user's accessKey
|
||||
* @param {string} params.data.signatureFromRequest - the signature read from
|
||||
* the request
|
||||
* @param {string} params.data.region - the AWS region
|
||||
* @param {string} params.data.stringToSign - the stringToSign
|
||||
* @param {string} params.data.scopeDate - the timespan to allow the request
|
||||
* @param {string} params.data.authType - the type of authentication
|
||||
* (query or header)
|
||||
* @param {string} params.data.signatureVersion - the version of the
|
||||
* signature (AWS or AWS4)
|
||||
* @param {number} params.data.signatureAge - the age of the signature in ms
|
||||
* @param {string} params.data.log - the logger object
|
||||
* @param {RequestContext []} requestContexts - an array of
|
||||
* RequestContext instances which contain information
|
||||
* for policy authorization check
|
||||
* @param {function} callback - cb(err)
|
||||
* @return {undefined}
|
||||
*/
|
||||
authenticateV4Request(params, requestContexts, callback) {
|
||||
const {
|
||||
accessKey, signatureFromRequest, region, scopeDate,
|
||||
stringToSign,
|
||||
} = params.data;
|
||||
const { log } = params;
|
||||
log.debug('authenticating V4 request');
|
||||
const serializedRCs = requestContexts.map(rc => rc.serialize());
|
||||
this._client.verifySignatureV4(
|
||||
stringToSign, signatureFromRequest,
|
||||
accessKey, region, scopeDate,
|
||||
{ reqUid: log.getSerializedUids(), requestContext: serializedRCs },
|
||||
(err, authInfo) => {
|
||||
if (err) {
|
||||
log.trace('error from vault', { error: err });
|
||||
return callback(err);
|
||||
}
|
||||
return callback(null,
|
||||
authInfo.message.body.authorizationResults);
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns canonical Ids for a given list of account Ids
|
||||
* @param {string[]} accountIds - list of account ids
|
||||
* @param {object} log - Werelogs request logger
|
||||
* @return {Promise} -
|
||||
*/
|
||||
getCanonicalIds(accountIds, log) {
|
||||
log.debug('retrieving canonical ids for account ids', {
|
||||
method: 'Vault.getCanonicalIds',
|
||||
});
|
||||
return new Promise((resolve, reject) =>
|
||||
this._client.getCanonicalIdsByAccountIds(accountIds,
|
||||
{ reqUid: log.getSerializedUids(), logger: log }, (err, res) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
if (!res.message || !res.message.body) {
|
||||
reject(errors.InternalError);
|
||||
return;
|
||||
}
|
||||
resolve(res.message.body.map(acc => ({
|
||||
resource: acc.accountId,
|
||||
id: acc.canonicalId,
|
||||
})));
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
const vault = new Vault(config);
|
||||
auth.setHandler(vault);
|
||||
|
||||
async function translateResourceIds(level, resources, log) {
|
||||
if (level === 'accounts') {
|
||||
return vault.getCanonicalIds(resources, log);
|
||||
}
|
||||
|
||||
return resources.map(resource => ({ resource, id: resource }));
|
||||
}
|
||||
|
||||
async function authenticateRequest(request, action, level, resources) {
|
||||
const policyContext = new policies.RequestContext(
|
||||
request.headers,
|
||||
request.query,
|
||||
level,
|
||||
resources,
|
||||
request.ip,
|
||||
request.ctx.encrypted,
|
||||
action,
|
||||
'utapi',
|
||||
);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
auth.server.doAuth(request, request.logger.logger, (err, res) => {
|
||||
if (err && (err.InvalidAccessKeyId || err.AccessDenied)) {
|
||||
resolve([false]);
|
||||
return;
|
||||
}
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
// Will only have res if request is from a user rather than an account
|
||||
let authorizedResources = resources;
|
||||
if (res) {
|
||||
try {
|
||||
authorizedResources = res.reduce(
|
||||
(authed, result) => {
|
||||
if (result.isAllowed) {
|
||||
// result.arn should be of format:
|
||||
// arn:scality:utapi:::resourcetype/resource
|
||||
assert(typeof result.arn === 'string');
|
||||
assert(result.arn.indexOf('/') > -1);
|
||||
const resource = result.arn.split('/')[1];
|
||||
authed.push(resource);
|
||||
request.logger.trace('access granted for resource', { resource });
|
||||
}
|
||||
return authed;
|
||||
}, [],
|
||||
);
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
} else {
|
||||
request.logger.trace('granted access to all resources');
|
||||
}
|
||||
|
||||
resolve([
|
||||
authorizedResources.length !== 0,
|
||||
authorizedResources,
|
||||
]);
|
||||
}, 's3', [policyContext]);
|
||||
});
|
||||
}
|
||||
|
||||
async function translateAndAuthorize(request, action, level, resources) {
|
||||
const [authed, authorizedResources] = await authenticateRequest(request, action, level, resources);
|
||||
if (!authed) {
|
||||
return [authed];
|
||||
}
|
||||
const translated = await translateResourceIds(level, authorizedResources, request.logger.logger);
|
||||
return [authed, translated];
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
authenticateRequest,
|
||||
translateAndAuthorize,
|
||||
Vault,
|
||||
vault,
|
||||
};
|
|
@ -0,0 +1,128 @@
|
|||
const assert = require('assert');
|
||||
const { auth, policies } = require('arsenal');
|
||||
const vaultclient = require('vaultclient');
|
||||
const config = require('../config');
|
||||
const errors = require('../errors');
|
||||
/**
|
||||
@class Vault
|
||||
* Creates a vault instance for authentication and authorization
|
||||
*/
|
||||
|
||||
class VaultWrapper extends auth.Vault {
|
||||
constructor(options) {
|
||||
let client;
|
||||
const { host, port } = options.vaultd;
|
||||
if (options.tls) {
|
||||
const { key, cert, ca } = options.tls;
|
||||
client = new vaultclient.Client(host, port, true, key, cert,
|
||||
ca);
|
||||
} else {
|
||||
client = new vaultclient.Client(host, port);
|
||||
}
|
||||
super(client, 'vault');
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns canonical Ids for a given list of account Ids
|
||||
* @param {string[]} accountIds - list of account ids
|
||||
* @param {object} log - Werelogs request logger
|
||||
* @return {Promise} -
|
||||
*/
|
||||
getCanonicalIds(accountIds, log) {
|
||||
log.debug('retrieving canonical ids for account ids', {
|
||||
method: 'Vault.getCanonicalIds',
|
||||
accountIds,
|
||||
});
|
||||
return new Promise((resolve, reject) =>
|
||||
this.client.getCanonicalIdsByAccountIds(accountIds,
|
||||
{ reqUid: log.getSerializedUids(), logger: log }, (err, res) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
if (!res.message || !res.message.body) {
|
||||
reject(errors.InternalError);
|
||||
return;
|
||||
}
|
||||
resolve(res.message.body.map(acc => ({
|
||||
resource: acc.accountId,
|
||||
id: acc.canonicalId,
|
||||
})));
|
||||
}));
|
||||
}
|
||||
|
||||
// eslint-disable-next-line class-methods-use-this
|
||||
authenticateRequest(request, action, level, resources) {
|
||||
const policyContext = new policies.RequestContext(
|
||||
request.headers,
|
||||
request.query,
|
||||
level,
|
||||
resources,
|
||||
request.ip,
|
||||
request.ctx.encrypted,
|
||||
action,
|
||||
'utapi',
|
||||
);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
auth.server.doAuth(
|
||||
request,
|
||||
request.logger.logger,
|
||||
(err, authInfo, authRes) => {
|
||||
if (err && (err.InvalidAccessKeyId || err.AccessDenied)) {
|
||||
resolve({ authed: false });
|
||||
return;
|
||||
}
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
|
||||
// Only IAM users will return authorizedResources
|
||||
let authorizedResources = resources;
|
||||
if (authRes) {
|
||||
authorizedResources = authRes
|
||||
.filter(resource => resource.isAllowed)
|
||||
.map(resource => {
|
||||
// resource.arn should be of format:
|
||||
// arn:scality:utapi:::resourcetype/resource
|
||||
assert(typeof resource.arn === 'string');
|
||||
assert(resource.arn.indexOf('/') > -1);
|
||||
return resource.arn.split('/')[1];
|
||||
});
|
||||
}
|
||||
|
||||
resolve({ authed: true, authInfo, authorizedResources });
|
||||
}, 's3', [policyContext],
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
getUsersById(userIds, log) {
|
||||
log.debug('retrieving user arns for user ids', {
|
||||
method: 'Vault.getUsersById',
|
||||
userIds,
|
||||
});
|
||||
return new Promise((resolve, reject) =>
|
||||
this.client.getUsersById(userIds,
|
||||
{ reqUid: log.getSerializedUids(), logger: log }, (err, res) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
if (!res.message || !res.message.body) {
|
||||
reject(errors.InternalError);
|
||||
return;
|
||||
}
|
||||
resolve(res.message.body);
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
const vault = new VaultWrapper(config);
|
||||
auth.setHandler(vault);
|
||||
|
||||
module.exports = {
|
||||
VaultWrapper,
|
||||
vault,
|
||||
};
|
|
@ -0,0 +1,172 @@
|
|||
const { vault } = require('./client');
|
||||
const metadata = require('../metadata');
|
||||
const errors = require('../errors');
|
||||
const config = require('../config');
|
||||
|
||||
async function authorizeAccountAccessKey(authInfo, level, resources, log) {
|
||||
let authed = false;
|
||||
let authedRes = [];
|
||||
|
||||
log.trace('Authorizing account', { resources });
|
||||
|
||||
switch (level) {
|
||||
// Account keys can only query their own account's metrics
|
||||
// So we can short circuit the auth to ->
|
||||
// Did they request their account? Then authorize ONLY their account
|
||||
case 'accounts':
|
||||
authed = resources.some(r => r === authInfo.getShortid());
|
||||
authedRes = authed ? [{ resource: authInfo.getShortid(), id: authInfo.getCanonicalID() }] : [];
|
||||
break;
|
||||
|
||||
// Account keys are allowed access to any of their child users metrics
|
||||
case 'users': {
|
||||
let users;
|
||||
try {
|
||||
users = await vault.getUsersById(resources, log.logger);
|
||||
} catch (error) {
|
||||
log.error('failed to fetch user', { error });
|
||||
throw errors.AccessDenied;
|
||||
}
|
||||
authedRes = users
|
||||
.filter(user => user.parentId === authInfo.getShortid())
|
||||
.map(user => ({ resource: user.id, id: user.id }));
|
||||
authed = authedRes.length !== 0;
|
||||
break;
|
||||
}
|
||||
|
||||
// Accounts are only allowed access if they are the owner of the bucket
|
||||
case 'buckets': {
|
||||
const buckets = await Promise.all(
|
||||
resources.map(async bucket => {
|
||||
try {
|
||||
const bucketMD = await metadata.getBucket(bucket);
|
||||
return bucketMD;
|
||||
} catch (error) {
|
||||
log.error('failed to fetch metadata for bucket', { error, bucket });
|
||||
throw errors.AccessDenied;
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
authedRes = buckets
|
||||
.filter(bucket => bucket.getOwner() === authInfo.getCanonicalID())
|
||||
.map(bucket => ({ resource: bucket.getName(), id: bucket.getName() }));
|
||||
authed = authedRes.length !== 0;
|
||||
break;
|
||||
}
|
||||
|
||||
// Accounts can not access service resources
|
||||
case 'services':
|
||||
break;
|
||||
|
||||
default:
|
||||
log.error('Unknown metric level', { level });
|
||||
throw new Error(`Unknown metric level ${level}`);
|
||||
}
|
||||
|
||||
return [authed, authedRes];
|
||||
}
|
||||
|
||||
async function authorizeUserAccessKey(authInfo, level, resources, log) {
|
||||
let authed = false;
|
||||
let authedRes = [];
|
||||
|
||||
log.trace('Authorizing IAM user', { resources });
|
||||
|
||||
// Get the parent account id from the user's arn
|
||||
const parentAccountId = authInfo.getArn().split(':')[4];
|
||||
|
||||
// All users require an attached policy to query metrics
|
||||
// Additional filtering is performed here to limit access to the user's account
|
||||
switch (level) {
|
||||
// User keys can only query their own account's metrics
|
||||
// So we can short circuit the auth to ->
|
||||
// Did they request their account? Then authorize ONLY their account
|
||||
case 'accounts': {
|
||||
authed = resources.some(r => r === parentAccountId);
|
||||
authedRes = authed ? [{ resource: parentAccountId, id: authInfo.getCanonicalID() }] : [];
|
||||
break;
|
||||
}
|
||||
|
||||
// Users can query other user's metrics if they are under the same account
|
||||
case 'users': {
|
||||
let users;
|
||||
try {
|
||||
users = await vault.getUsersById(resources, log.logger);
|
||||
} catch (error) {
|
||||
log.error('failed to fetch user', { error });
|
||||
throw errors.AccessDenied;
|
||||
}
|
||||
authedRes = users
|
||||
.filter(user => user.parentId === parentAccountId)
|
||||
.map(user => ({ resource: user.id, id: user.id }));
|
||||
authed = authedRes.length !== 0;
|
||||
break;
|
||||
}
|
||||
|
||||
// Users can query bucket metrics if they are owned by the same account
|
||||
case 'buckets': {
|
||||
let buckets;
|
||||
try {
|
||||
buckets = await Promise.all(
|
||||
resources.map(bucket => metadata.getBucket(bucket)),
|
||||
);
|
||||
} catch (error) {
|
||||
log.error('failed to fetch metadata for bucket', { error });
|
||||
throw error;
|
||||
}
|
||||
authedRes = buckets
|
||||
.filter(bucket => bucket.getOwner() === authInfo.getCanonicalID())
|
||||
.map(bucket => ({ resource: bucket.getName(), id: bucket.getName() }));
|
||||
authed = authedRes.length !== 0;
|
||||
break;
|
||||
}
|
||||
|
||||
case 'services':
|
||||
break;
|
||||
|
||||
default:
|
||||
log.error('Unknown metric level', { level });
|
||||
throw new Error(`Unknown metric level ${level}`);
|
||||
}
|
||||
return [authed, authedRes];
|
||||
}
|
||||
|
||||
async function authorizeServiceUser(authInfo, level, resources, log) {
|
||||
log.trace('Authorizing service user', { resources, arn: authInfo.getArn() });
|
||||
// The service user is allowed access to any resource so no checking is done
|
||||
if (level === 'accounts') {
|
||||
const canonicalIds = await vault.getCanonicalIds(resources, log.logger);
|
||||
return [canonicalIds.length !== 0, canonicalIds];
|
||||
}
|
||||
|
||||
return [resources.length !== 0, resources.map(resource => ({ resource, id: resource }))];
|
||||
}
|
||||
|
||||
|
||||
async function translateAndAuthorize(request, action, level, resources) {
|
||||
const {
|
||||
authed,
|
||||
authInfo,
|
||||
authorizedResources,
|
||||
} = await vault.authenticateRequest(request, action, level, resources);
|
||||
|
||||
if (!authed) {
|
||||
return [false, []];
|
||||
}
|
||||
|
||||
if (config.serviceUser.enabled && authInfo.getArn() === config.serviceUser.arn) {
|
||||
return authorizeServiceUser(authInfo, level, authorizedResources, request.logger);
|
||||
}
|
||||
|
||||
if (authInfo.isRequesterAnIAMUser()) {
|
||||
return authorizeUserAccessKey(authInfo, level, authorizedResources, request.logger);
|
||||
}
|
||||
|
||||
return authorizeAccountAccessKey(authInfo, level, authorizedResources, request.logger);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
translateAndAuthorize,
|
||||
vault,
|
||||
};
|
|
@ -3,6 +3,11 @@ const needle = require('needle');
|
|||
const assert = require('assert');
|
||||
const { eventFieldsToWarp10, warp10EventType } = require('./constants');
|
||||
const _config = require('./config');
|
||||
const { LoggerContext } = require('./utils');
|
||||
|
||||
const moduleLogger = new LoggerContext({
|
||||
module: 'warp10',
|
||||
});
|
||||
|
||||
class Warp10Client {
|
||||
constructor(config) {
|
||||
|
@ -74,6 +79,7 @@ class Warp10Client {
|
|||
async exec(params) {
|
||||
const payload = this._buildExecPayload(params);
|
||||
const resp = await this._client.exec(payload);
|
||||
moduleLogger.info('warpscript executed', { stats: resp.meta });
|
||||
return resp;
|
||||
}
|
||||
|
||||
|
@ -123,6 +129,8 @@ const clients = _config.warp10.hosts.map(
|
|||
val => new Warp10Client({
|
||||
readToken: _config.warp10.readToken,
|
||||
writeToken: _config.warp10.writeToken,
|
||||
connectTimeout: _config.warp10.connectTimeout,
|
||||
requestTimeout: _config.warp10.requestTimeout,
|
||||
...val,
|
||||
}),
|
||||
);
|
||||
|
|
openapi.yaml
@ -101,6 +101,12 @@ components:
|
|||
type: string
|
||||
level:
|
||||
type: string
|
||||
utapi-get-prometheus-metrics:
|
||||
description: metrics to be ingested by prometheus
|
||||
content:
|
||||
text/plain:
|
||||
schema:
|
||||
type: string
|
||||
parameters:
|
||||
level:
|
||||
in: path
|
||||
|
@ -133,6 +139,16 @@ paths:
|
|||
$ref: '#/components/responses/json-error'
|
||||
200:
|
||||
description: Service is healthy
|
||||
/_/metrics:
|
||||
get:
|
||||
x-router-controller: internal
|
||||
x-iplimit: true
|
||||
operationId: prometheusMetrics
|
||||
responses:
|
||||
default:
|
||||
$ref: '#/components/responses/json-error'
|
||||
200:
|
||||
$ref: '#/components/responses/utapi-get-prometheus-metrics'
|
||||
/v2/ingest:
|
||||
post:
|
||||
x-router-controller: metrics
|
||||
|
|
package.json
@ -21,6 +21,7 @@
|
|||
"@senx/warp10": "^1.0.14",
|
||||
"arsenal": "scality/Arsenal#65966f5",
|
||||
"async": "^3.2.0",
|
||||
"aws-sdk": "^2.1005.0",
|
||||
"aws4": "^1.8.0",
|
||||
"backo": "^1.1.0",
|
||||
"body-parser": "^1.19.0",
|
||||
|
@ -37,8 +38,9 @@
|
|||
"needle": "^2.5.0",
|
||||
"node-schedule": "^1.3.2",
|
||||
"oas-tools": "^2.1.8",
|
||||
"prom-client": "^13.1.0",
|
||||
"uuid": "^3.3.2",
|
||||
"vaultclient": "scality/vaultclient#ff9e92f",
|
||||
"vaultclient": "scality/vaultclient#8c4d210",
|
||||
"werelogs": "scality/werelogs#0a4c576"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
@ -51,6 +53,12 @@
|
|||
"protobufjs": "^6.10.1",
|
||||
"sinon": "^9.0.2"
|
||||
},
|
||||
"resolutions": {
|
||||
"**/@yarnpkg/fslib": "2.4.0",
|
||||
"**/@yarnpkg/libzip": "2.2.1",
|
||||
"**/@yarnpkg/json-proxy": "2.1.0",
|
||||
"**/@yarnpkg/parsers": "2.3.0"
|
||||
},
|
||||
"scripts": {
|
||||
"ft_test": "mocha --recursive tests/functional",
|
||||
"ft_test:client": "mocha --recursive tests/functional/client",
|
||||
|
|
|
@ -9,7 +9,8 @@ const { convertTimestamp, now } = require('../../../../libV2/utils');
|
|||
const { operationToResponse } = require('../../../../libV2/constants');
|
||||
|
||||
const { generateCustomEvents } = require('../../../utils/v2Data');
|
||||
const { UtapiMetric } = require('../../../../libV2/models');
|
||||
const { BucketD } = require('../../../utils/mock/');
|
||||
const vaultclient = require('../../../utils/vaultclient');
|
||||
|
||||
const warp10 = warp10Clients[0];
|
||||
const _now = Math.floor(new Date().getTime() / 1000);
|
||||
|
@ -28,7 +29,7 @@ const emptyOperationsResponse = Object.values(operationToResponse)
|
|||
return prev;
|
||||
}, {});
|
||||
|
||||
async function listMetrics(level, resources, start, end, force403 = false) {
|
||||
async function listMetrics(level, resources, start, end, credentials) {
|
||||
const body = {
|
||||
[level]: resources,
|
||||
};
|
||||
|
@ -50,12 +51,13 @@ async function listMetrics(level, resources, start, end, force403 = false) {
|
|||
},
|
||||
};
|
||||
|
||||
const credentials = {
|
||||
accessKeyId: force403 ? 'invalidKey' : 'accessKey1',
|
||||
secretAccessKey: 'verySecretKey1',
|
||||
const { accessKey: accessKeyId, secretKey: secretAccessKey } = credentials;
|
||||
const _credentials = {
|
||||
accessKeyId,
|
||||
secretAccessKey,
|
||||
};
|
||||
|
||||
const sig = aws4.sign(headers, credentials);
|
||||
const sig = aws4.sign(headers, _credentials);
|
||||
|
||||
return needle(
|
||||
'post',
|
||||
|
@ -81,140 +83,202 @@ function opsToResp(operations) {
|
|||
}, { ...emptyOperationsResponse });
|
||||
}
|
||||
|
||||
const testCases = [
|
||||
{
|
||||
desc: 'for a single resource',
|
||||
args: { [uuid.v4()]: { [uuid.v4()]: [uuid.v4()] } },
|
||||
},
|
||||
{
|
||||
desc: 'for multiple resources',
|
||||
args: {
|
||||
[uuid.v4()]: {
|
||||
[uuid.v4()]: [uuid.v4(), uuid.v4(), uuid.v4()],
|
||||
[uuid.v4()]: [uuid.v4(), uuid.v4(), uuid.v4()],
|
||||
},
|
||||
[uuid.v4()]: {
|
||||
[uuid.v4()]: [uuid.v4(), uuid.v4(), uuid.v4()],
|
||||
[uuid.v4()]: [uuid.v4(), uuid.v4(), uuid.v4()],
|
||||
},
|
||||
[uuid.v4()]: {
|
||||
[uuid.v4()]: [uuid.v4(), uuid.v4(), uuid.v4()],
|
||||
[uuid.v4()]: [uuid.v4(), uuid.v4(), uuid.v4()],
|
||||
},
|
||||
},
|
||||
},
|
||||
];
|
||||
function assertMetricResponse(provided, expected) {
|
||||
assert.deepStrictEqual(provided.operations, opsToResp(expected.ops));
|
||||
assert.strictEqual(provided.incomingBytes, expected.in);
|
||||
assert.strictEqual(provided.outgoingBytes, expected.out);
|
||||
assert.deepStrictEqual(provided.storageUtilized, [0, expected.bytes]);
|
||||
assert.deepStrictEqual(provided.numberOfObjects, [0, expected.count]);
|
||||
}
|
||||
|
||||
describe('Test listMetric', function () {
|
||||
this.timeout(10000);
|
||||
testCases.forEach(testCase => {
|
||||
describe(testCase.desc, () => {
|
||||
let totals;
|
||||
before(async () => {
|
||||
const { events, totals: _totals } = generateCustomEvents(
|
||||
getTs(-360),
|
||||
getTs(-60),
|
||||
1000,
|
||||
testCase.args,
|
||||
);
|
||||
totals = _totals;
|
||||
assert(await ingestEvents(events));
|
||||
const bucketd = new BucketD(true);
|
||||
|
||||
let account;
|
||||
let user;
|
||||
let otherAccount;
|
||||
let otherUser;
|
||||
let serviceAccount;
|
||||
let serviceUser;
|
||||
const bucket = uuid.v4();
|
||||
const otherBucket = uuid.v4();
|
||||
let totals;
|
||||
|
||||
before(async () => {
|
||||
account = await vaultclient.createAccountAndKeys(uuid.v4());
|
||||
user = await vaultclient.createUserAndKeys(account, uuid.v4());
|
||||
otherAccount = await vaultclient.createAccountAndKeys(uuid.v4());
|
||||
otherUser = await vaultclient.createUser(otherAccount, uuid.v4());
|
||||
serviceAccount = await vaultclient.createInternalServiceAccountAndKeys();
|
||||
serviceUser = await vaultclient.createUserAndKeys(serviceAccount, 'service-utapi-user');
|
||||
|
||||
await Promise.all([
|
||||
vaultclient.createAndAttachUtapiPolicy(account, user, 'buckets', '*'),
|
||||
vaultclient.createAndAttachUtapiPolicy(account, user, 'accounts', '*'),
|
||||
vaultclient.createAndAttachUtapiPolicy(account, user, 'users', '*'),
|
||||
vaultclient.createAndAttachUtapiPolicy(serviceAccount, serviceUser, 'buckets', '*'),
|
||||
vaultclient.createAndAttachUtapiPolicy(serviceAccount, serviceUser, 'accounts', '*'),
|
||||
vaultclient.createAndAttachUtapiPolicy(serviceAccount, serviceUser, 'users', '*'),
|
||||
]);
|
||||
|
||||
bucketd.createBucketsWithOwner([
|
||||
{ name: bucket, owner: account.canonicalId },
|
||||
{ name: otherBucket, owner: otherAccount.canonicalId },
|
||||
]);
|
||||
bucketd.start();
|
||||
|
||||
const { events, totals: _totals } = generateCustomEvents(
|
||||
getTs(-360),
|
||||
getTs(-60),
|
||||
1000,
|
||||
{ [account.canonicalId]: { [user.id]: [bucket] } },
|
||||
);
|
||||
totals = _totals;
|
||||
assert(await ingestEvents(events));
|
||||
});
|
||||
|
||||
after(async () => {
|
||||
bucketd.end();
|
||||
await warp10.delete({
|
||||
className: '~.*',
|
||||
start: 0,
|
||||
end: now(),
|
||||
});
|
||||
await vaultclient.cleanupAccountAndUsers(account);
|
||||
await vaultclient.cleanupAccountAndUsers(otherAccount);
|
||||
await vaultclient.cleanupAccountAndUsers(serviceAccount);
|
||||
});
|
||||
|
||||
describe('test account credentials', () => {
|
||||
it('should list metrics for the same account', async () => {
|
||||
const resp = await listMetrics('accounts', [account.id], getTs(-500), getTs(0), account);
|
||||
assert(Array.isArray(resp.body));
|
||||
const { body } = resp;
|
||||
assert.deepStrictEqual(body.map(r => r[metricResponseKeys.accounts]), [account.id]);
|
||||
body.forEach(metric => {
|
||||
assertMetricResponse(metric, totals.accounts[account.canonicalId]);
|
||||
});
|
||||
});
|
||||
|
||||
after(async () => {
|
||||
await warp10.delete({
|
||||
className: '~.*',
|
||||
start: 0,
|
||||
end: now(),
|
||||
});
|
||||
it("should list metrics for an account's user", async () => {
|
||||
const resp = await listMetrics('users', [user.id], getTs(-500), getTs(0), account);
|
||||
assert(Array.isArray(resp.body));
|
||||
const { body } = resp;
|
||||
assert.deepStrictEqual(body.map(r => r[metricResponseKeys.users]), [user.id]);
|
||||
body.forEach(metric => {
|
||||
assertMetricResponse(metric, totals.users[user.id]);
|
||||
});
|
||||
});
|
||||
|
||||
const accounts = [];
|
||||
const users = [];
|
||||
const buckets = [];
|
||||
it("should list metrics for an account's bucket", async () => {
|
||||
const resp = await listMetrics('buckets', [bucket], getTs(-500), getTs(0), account);
|
||||
assert(Array.isArray(resp.body));
|
||||
const { body } = resp;
|
||||
assert.deepStrictEqual(body.map(r => r[metricResponseKeys.buckets]), [bucket]);
|
||||
body.forEach(metric => {
|
||||
assertMetricResponse(metric, totals.buckets[bucket]);
|
||||
});
|
||||
});
|
||||
|
||||
Object.entries(testCase.args)
|
||||
.forEach(([account, _users]) => {
|
||||
accounts.push(`account:${account}`);
|
||||
Object.entries(_users).forEach(([user, _buckets]) => {
|
||||
users.push(user);
|
||||
buckets.push(..._buckets);
|
||||
});
|
||||
});
|
||||
it('should return Access Denied for a different account', async () => {
|
||||
const resp = await listMetrics('accounts', [otherAccount.id], getTs(-500), getTs(0), account);
|
||||
assert.strictEqual(resp.statusCode, 403);
|
||||
assert.deepStrictEqual(resp.body, { code: 'AccessDenied', message: 'Access Denied' });
|
||||
});
|
||||
|
||||
const metricQueries = {
|
||||
accounts,
|
||||
users,
|
||||
buckets,
|
||||
};
|
||||
it("should return Access Denied for a different account's user", async () => {
|
||||
const resp = await listMetrics('users', [otherUser.id], getTs(-500), getTs(0), account);
|
||||
assert.strictEqual(resp.statusCode, 403);
|
||||
assert.deepStrictEqual(resp.body, { code: 'AccessDenied', message: 'Access Denied' });
|
||||
});
|
||||
|
||||
Object.entries(metricQueries)
|
||||
.forEach(query => {
|
||||
const [level, resources] = query;
|
||||
it(`should get metrics for ${level}`, async () => {
|
||||
const resp = await listMetrics(...query, getTs(-500), getTs(0));
|
||||
assert(Array.isArray(resp.body));
|
||||
|
||||
const { body } = resp;
|
||||
assert.deepStrictEqual(body.map(r => r[metricResponseKeys[level]]), resources);
|
||||
|
||||
body.forEach(metric => {
|
||||
const key = metric[metricResponseKeys[level]];
|
||||
const _key = level === 'accounts' ? key.split(':')[1] : key;
|
||||
const expected = totals[level][_key];
|
||||
assert.deepStrictEqual(metric.operations, opsToResp(expected.ops));
|
||||
assert.strictEqual(metric.incomingBytes, expected.in);
|
||||
assert.strictEqual(metric.outgoingBytes, expected.out);
|
||||
assert.deepStrictEqual(metric.storageUtilized, [0, expected.bytes]);
|
||||
assert.deepStrictEqual(metric.numberOfObjects, [0, expected.count]);
|
||||
});
|
||||
});
|
||||
});
|
||||
it("should return Access Denied for a different account's bucket", async () => {
|
||||
const resp = await listMetrics('buckets', [otherBucket], getTs(-500), getTs(0), account);
|
||||
assert.strictEqual(resp.statusCode, 403);
|
||||
assert.deepStrictEqual(resp.body, { code: 'AccessDenied', message: 'Access Denied' });
|
||||
});
|
||||
});
|
||||
|
||||
it('should return 0 if metrics are negative', async () => {
|
||||
const bucket = `imabucket-${uuid.v4()}`;
|
||||
const account = `imaaccount-${uuid.v4()}`;
|
||||
const event = new UtapiMetric({
|
||||
timestamp: getTs(0),
|
||||
bucket,
|
||||
account,
|
||||
objectDelta: -1,
|
||||
sizeDelta: -1,
|
||||
incomingBytes: -1,
|
||||
outgoingBytes: -1,
|
||||
operationId: 'putObject',
|
||||
describe('test user credentials', () => {
|
||||
it('should list metrics for the same account', async () => {
|
||||
const resp = await listMetrics('accounts', [account.id], getTs(-500), getTs(0), user);
|
||||
assert(Array.isArray(resp.body));
|
||||
const { body } = resp;
|
||||
assert.deepStrictEqual(body.map(r => r[metricResponseKeys.accounts]), [account.id]);
|
||||
body.forEach(metric => {
|
||||
assertMetricResponse(metric, totals.accounts[account.canonicalId]);
|
||||
});
|
||||
});
|
||||
|
||||
await ingestEvents([event]);
|
||||
it("should list metrics for a user's account", async () => {
|
||||
const resp = await listMetrics('users', [user.id], getTs(-500), getTs(0), user);
|
||||
assert(Array.isArray(resp.body));
|
||||
const { body } = resp;
|
||||
assert.deepStrictEqual(body.map(r => r[metricResponseKeys.users]), [user.id]);
|
||||
body.forEach(metric => {
|
||||
assertMetricResponse(metric, totals.users[user.id]);
|
||||
});
|
||||
});
|
||||
|
||||
const bucketResp = await listMetrics('buckets', [bucket], getTs(-1), getTs(1));
|
||||
assert(Array.isArray(bucketResp.body));
|
||||
it("should list metrics for an user's bucket", async () => {
|
||||
const resp = await listMetrics('buckets', [bucket], getTs(-500), getTs(0), user);
|
||||
assert(Array.isArray(resp.body));
|
||||
const { body } = resp;
|
||||
assert.deepStrictEqual(body.map(r => r[metricResponseKeys.buckets]), [bucket]);
|
||||
body.forEach(metric => {
|
||||
assertMetricResponse(metric, totals.buckets[bucket]);
|
||||
});
|
||||
});
|
||||
|
||||
const [bucketMetric] = bucketResp.body;
|
||||
assert.deepStrictEqual(bucketMetric.storageUtilized, [0, 0]);
|
||||
assert.deepStrictEqual(bucketMetric.numberOfObjects, [0, 0]);
|
||||
assert.deepStrictEqual(bucketMetric.incomingBytes, 0);
|
||||
assert.deepStrictEqual(bucketMetric.outgoingBytes, 0);
|
||||
it("should return Access Denied for a different user's account", async () => {
|
||||
const resp = await listMetrics('accounts', [otherAccount.id], getTs(-500), getTs(0), user);
|
||||
assert.strictEqual(resp.statusCode, 403);
|
||||
assert.deepStrictEqual(resp.body, { code: 'AccessDenied', message: 'Access Denied' });
|
||||
});
|
||||
|
||||
const accountResp = await listMetrics('accounts', [account], getTs(-1), getTs(1));
|
||||
assert(Array.isArray(accountResp.body));
|
||||
it("should return Access Denied for a different account's user", async () => {
|
||||
const resp = await listMetrics('users', [otherUser.id], getTs(-500), getTs(0), user);
|
||||
assert.strictEqual(resp.statusCode, 403);
|
||||
assert.deepStrictEqual(resp.body, { code: 'AccessDenied', message: 'Access Denied' });
|
||||
});
|
||||
|
||||
const [accountMetric] = accountResp.body;
|
||||
assert.deepStrictEqual(accountMetric.storageUtilized, [0, 0]);
|
||||
assert.deepStrictEqual(accountMetric.numberOfObjects, [0, 0]);
|
||||
assert.deepStrictEqual(accountMetric.incomingBytes, 0);
|
||||
assert.deepStrictEqual(accountMetric.outgoingBytes, 0);
|
||||
it("should return Access Denied for a different account's bucket", async () => {
|
||||
const resp = await listMetrics('buckets', [otherBucket], getTs(-500), getTs(0), user);
|
||||
assert.strictEqual(resp.statusCode, 403);
|
||||
assert.deepStrictEqual(resp.body, { code: 'AccessDenied', message: 'Access Denied' });
|
||||
});
|
||||
});
|
||||
|
||||
it('should return a 403 if unauthorized', async () => {
|
||||
const resp = await listMetrics('buckets', ['test'], getTs(-1), getTs(1), true);
|
||||
assert.strictEqual(resp.statusCode, 403);
|
||||
});
|
||||
describe('test service user credentials', () => {
|
||||
it('should list metrics for an account', async () => {
|
||||
const resp = await listMetrics('accounts', [account.id], getTs(-500), getTs(0), serviceUser);
|
||||
assert(Array.isArray(resp.body));
|
||||
const { body } = resp;
|
||||
assert.deepStrictEqual(body.map(r => r[metricResponseKeys.accounts]), [account.id]);
|
||||
body.forEach(metric => {
|
||||
assertMetricResponse(metric, totals.accounts[account.canonicalId]);
|
||||
});
|
||||
});
|
||||
|
||||
it('should use the current timestamp for "end" if it is not provided', async () => {
|
||||
const resp = await listMetrics('buckets', ['test'], getTs(-1));
|
||||
assert.strictEqual(resp.body[0].timeRange.length, 2);
|
||||
it('should list metrics for a user', async () => {
|
||||
const resp = await listMetrics('users', [user.id], getTs(-500), getTs(0), serviceUser);
|
||||
assert(Array.isArray(resp.body));
|
||||
const { body } = resp;
|
||||
assert.deepStrictEqual(body.map(r => r[metricResponseKeys.users]), [user.id]);
|
||||
body.forEach(metric => {
|
||||
assertMetricResponse(metric, totals.users[user.id]);
|
||||
});
|
||||
});
|
||||
|
||||
it('should list metrics for a bucket', async () => {
|
||||
const resp = await listMetrics('buckets', [bucket], getTs(-500), getTs(0), serviceUser);
|
||||
assert(Array.isArray(resp.body));
|
||||
const { body } = resp;
|
||||
assert.deepStrictEqual(body.map(r => r[metricResponseKeys.buckets]), [bucket]);
|
||||
body.forEach(metric => {
|
||||
assertMetricResponse(metric, totals.buckets[bucket]);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -0,0 +1,23 @@
|
|||
const assert = require('assert');
const needle = require('needle');

const testMetrics = async pair => {
    const [name, url] = pair;
    it(`should return metrics for ${name} from url ${url}`, async () => {
        const res = await needle('get', url);
        const lines = res.body.split('\n');
        const first = lines[0];

        assert.strictEqual(res.statusCode, 200);
        assert(first.startsWith('# HELP'));
    });
};

describe('Test Prometheus Metrics', () => {
    const nameUrlPairs = [
        ['utapi nodejs service exporter', 'http://localhost:8100/_/metrics'],
        ['sensision exporter', 'http://localhost:9718/metrics'],
        ['redis exporter', 'http://localhost:9121/metrics'],
    ];
    nameUrlPairs.forEach(pair => testMetrics(pair));
});
|
|
@ -1,7 +1,7 @@
|
|||
const assert = require('assert');
|
||||
const sinon = require('sinon');
|
||||
const uuid = require('uuid');
|
||||
const { mpuBucketPrefix } = require('arsenal').constants;
|
||||
const { constants: arsenalConstants, models: arsenalModels } = require('arsenal');
|
||||
|
||||
const { Warp10Client } = require('../../../../libV2/warp10');
|
||||
const { ReindexTask } = require('../../../../libV2/tasks');
|
||||
|
@ -9,7 +9,9 @@ const { now } = require('../../../../libV2/utils');
|
|||
const { BucketD, values } = require('../../../utils/mock/');
|
||||
const { fetchRecords } = require('../../../utils/v2Data');
|
||||
|
||||
const { CANONICAL_ID, BUCKET_NAME } = values;
|
||||
const { mpuBucketPrefix } = arsenalConstants;
|
||||
const { ObjectMD } = arsenalModels;
|
||||
const { CANONICAL_ID, BUCKET_NAME, OBJECT_KEY } = values;
|
||||
const bucketCounts = [1, 251];
|
||||
|
||||
const bucketRecord = {
|
||||
|
@ -120,6 +122,34 @@ describe('Test ReindexTask', function () {
|
|||
});
|
||||
});
|
||||
|
||||
describe('test invalid responses from bucketd', () => {
|
||||
let bucketDStub;
|
||||
beforeEach(() => {
|
||||
bucketDStub = sinon.stub(bucketd, '_getBucketResponse');
|
||||
});
|
||||
|
||||
afterEach(() => sinon.restore());
|
||||
|
||||
it('should skip object if content-length is not an integer', async () => {
|
||||
bucketDStub = bucketDStub.callsFake(
|
||||
() => {
|
||||
const metadata = new ObjectMD().getValue();
|
||||
// null value taken from error seen in the field
|
||||
metadata['content-length'] = null;
|
||||
return [
|
||||
{
|
||||
key: OBJECT_KEY,
|
||||
value: JSON.stringify(metadata),
|
||||
},
|
||||
];
|
||||
},
|
||||
);
|
||||
const resp = await ReindexTask._indexBucket('foo');
|
||||
assert.deepStrictEqual(resp, { size: 0, count: 0 });
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('should avoid calculating incorrect reindex diffs', async () => {
|
||||
const bucketName = `${BUCKET_NAME}-1`;
|
||||
bucketd
|
||||
|
|
|
@ -0,0 +1,102 @@
|
|||
/* eslint-disable no-console */
const assert = require('assert');
const { exec } = require('child_process');
const path = require('path');

const vaultclient = require('../../utils/vaultclient');

const ensureServiceUser = path.resolve(__dirname, '../../../bin/ensureServiceUser');

const expectedPolicyDocument = {
    Version: '2012-10-17',
    Statement: {
        Effect: 'Allow',
        Action: 'utapi:ListMetrics',
        Resource: 'arn:scality:utapi:::*/*',
    },
};

async function execPath(path, args, env) {
    const proc = exec(`${path} ${args.join(' ')}`, {
        env,
        stdio: 'pipe',
    });
    proc.stdout.on('data', data => console.log(data.toString()));
    proc.stderr.on('data', data => console.error(data.toString()));
    return new Promise((resolve, reject) => {
        proc.on('error', err => reject(err));
        proc.on('exit', exitCode => {
            if (exitCode !== 0) {
                reject(new Error(`ensureServiceUser exited with non-zero code ${exitCode}`));
                return;
            }
            resolve();
        });
    });
}

// Allow overriding the path to the node binary
// useful to work around issues when running locally and using a node version manager
const NODE_INTERPRETER = process.env.NODE_INTERPRETER ? process.env.NODE_INTERPRETER : 'node';
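// A minimal usage sketch (not part of the original file; the exact test-runner invocation is an
// assumption): point NODE_INTERPRETER at a specific binary when running this suite, e.g.
//   NODE_INTERPRETER=/usr/local/bin/node <your mocha/test command for this file>
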
function executeScript(account) {
    return execPath(
        NODE_INTERPRETER,
        [ensureServiceUser, 'apply', 'service-utapi-user'],
        {
            AWS_ACCESS_KEY_ID: account.accessKey,
            AWS_SECRET_ACCESS_KEY: account.secretKey,
            AWS_REGION: 'us-east-1',
            NODE_TLS_REJECT_UNAUTHORIZED: '0',
        },
    );
}

describe('test bin/ensureServiceUser', () => {
    let adminAccount;

    before(async () => {
        adminAccount = await vaultclient.createInternalServiceAccountAndKeys();
    });

    after(() => vaultclient.cleanupAccountAndUsers(adminAccount));

    beforeEach(() => executeScript(adminAccount));
    afterEach(() => vaultclient.cleanupUsers(adminAccount));

    it('should create the service user and attach a policy', async () => {
        const res = await vaultclient.getInternalServiceUserAndPolicies(adminAccount);
        assert.strictEqual(res.policies.length, 1);
        assert.deepStrictEqual(res.policies[0].document, expectedPolicyDocument);
    });

    it('should exit with success on subsequent runs', async () => {
        const res = await vaultclient.getInternalServiceUserAndPolicies(adminAccount);
        assert.strictEqual(res.policies.length, 1);
        assert.deepStrictEqual(res.policies[0].document, expectedPolicyDocument);
        await executeScript(adminAccount);
        const res2 = await vaultclient.getInternalServiceUserAndPolicies(adminAccount);
        assert.strictEqual(res2.policies.length, 1);
        assert.deepStrictEqual(res2.policies[0].document, expectedPolicyDocument);
    });

    it('should create and attach a policy if the user already exists', async () => {
        const detached = await vaultclient.detachUserPolicies(adminAccount, { name: 'service-utapi-user' });
        assert.strictEqual(detached.length, 1);
        const client = vaultclient.getIAMClient(adminAccount);
        await Promise.all(detached.map(PolicyArn => client.deletePolicy({ PolicyArn }).promise()));
        await executeScript(adminAccount);

        const res = await vaultclient.getInternalServiceUserAndPolicies(adminAccount);
        assert.strictEqual(res.policies.length, 1);
        assert.deepStrictEqual(res.policies[0].document, expectedPolicyDocument);
    });

    it('should not create the policy if it already exists', async () => {
        await vaultclient.detachUserPolicies(adminAccount, { name: 'service-utapi-user' });
        await executeScript(adminAccount);
        const res = await vaultclient.getInternalServiceUserAndPolicies(adminAccount);
        assert.strictEqual(res.policies.length, 1);
        assert.deepStrictEqual(res.policies[0].document, expectedPolicyDocument);
    });
});
|
|
@ -0,0 +1,35 @@
|
|||
const assert = require('assert');
const { RequestContext } = require('../../../../../libV2/models');
const { templateRequest } = require('../../../../utils/v2Data');
const prometheusMetrics = require('../../../../../libV2/server/API/internal/prometheusMetrics');

describe('Test prometheusMetrics', () => {
    const overrides = {
        swagger: {
            operation: {
                'x-router-controller': 'internal',
                'operationId': 'prometheusMetrics',
            },
            params: {},
        },
    };
    const ctx = new RequestContext(templateRequest(overrides));

    before(async () => {
        await prometheusMetrics(ctx);
    });

    it('should set statusCode to 200', () => {
        assert.strictEqual(ctx.results.statusCode, 200);
    });

    it('should have a response body', () => {
        assert.strictEqual(typeof ctx.results.body, 'string');
    });

    it('should contain metrics', () => {
        const lines = ctx.results.body.split('\n');
        const first = lines[0];
        assert(first.startsWith('# HELP'));
    });
});
|
|
@ -2,6 +2,7 @@ const assert = require('assert');
|
|||
|
||||
const APIController = require('../../../../libV2/server/controller');
const healthcheck = require('../../../../libV2/server/API/internal/healthcheck');
const prometheusMetrics = require('../../../../libV2/server/API/internal/prometheusMetrics');

const { ResponseContainer } = require('../../../../libV2/models');
const { ExpressResponseStub, templateRequest } = require('../../../utils/v2Data');

@ -41,6 +42,7 @@ describe('Test APIController', () => {
        const handlers = APIController._collectHandlers('internal');
        assert.deepStrictEqual(handlers, {
            healthcheck,
            prometheusMetrics,
        });
    });
|
||||
|
||||
|
|
|
@ -7,6 +7,7 @@ const { models, constants } = require('arsenal');
|
|||
const { CANONICAL_ID, BUCKET_NAME, OBJECT_KEY } = require('./values');
|
||||
|
||||
const { ObjectMD } = models;
|
||||
const { mpuBucketPrefix } = constants;
|
||||
|
||||
class BucketD {
|
||||
constructor(isV2 = false) {
|
||||
|
@ -49,7 +50,12 @@ class BucketD {
|
|||
const { splitter } = constants;
|
||||
const entry = {
|
||||
key: `${CANONICAL_ID}${splitter}${BUCKET_NAME}-${i + 1}`,
|
||||
value: JSON.stringify({ creationDate: new Date() }),
|
||||
value: JSON.stringify({
|
||||
creationDate: new Date(),
|
||||
name: `${BUCKET_NAME}-${i + 1}`,
|
||||
owner: CANONICAL_ID,
|
||||
ownerDisplayName: 'steve',
|
||||
}),
|
||||
};
|
||||
buckets.push(entry);
|
||||
}
|
||||
|
@ -57,6 +63,22 @@ class BucketD {
|
|||
return this;
|
||||
}
|
||||
|
||||
createBucketsWithOwner(buckets) {
|
||||
const { splitter } = constants;
|
||||
this._buckets = buckets.map(
|
||||
({ name, owner }) => ({
|
||||
key: `${owner}${splitter}${name}`,
|
||||
value: JSON.stringify({
|
||||
creationDate: new Date(),
|
||||
name,
|
||||
owner,
|
||||
ownerDisplayName: 'steve',
|
||||
}),
|
||||
}),
|
||||
);
|
||||
return this._app;
|
||||
}
|
||||
|
||||
_getUsersBucketResponse(req) {
|
||||
const body = {
|
||||
CommonPrefixes: [],
|
||||
|
@ -129,14 +151,24 @@ class BucketD {
|
|||
});
|
||||
|
||||
this._app.get('/default/attributes/:bucketName', (req, res) => {
|
||||
const key = req.params.bucketName;
|
||||
const bucket = this._bucketContent[key];
|
||||
const { splitter } = constants;
|
||||
const { bucketName } = req.params;
|
||||
let filterKey = bucketName;
|
||||
if (bucketName.indexOf(mpuBucketPrefix) !== -1) {
|
||||
filterKey = bucketName.replace(mpuBucketPrefix, '');
|
||||
}
|
||||
const bucket = this._buckets
|
||||
.reduce(
|
||||
(prev, b) => (
|
||||
b.key.split(splitter)[1] === filterKey
|
||||
? JSON.parse(b.value)
|
||||
: prev),
|
||||
null,
|
||||
);
|
||||
if (bucket) {
|
||||
res.status(200).send({
|
||||
name: key,
|
||||
owner: CANONICAL_ID,
|
||||
ownerDisplayName: 'steve',
|
||||
creationDate: new Date(),
|
||||
...bucket,
|
||||
name: bucketName,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
@ -157,7 +189,9 @@ class BucketD {
|
|||
}
|
||||
|
||||
end() {
|
||||
this._server.close();
|
||||
if (this._server !== null) {
|
||||
this._server.close();
|
||||
}
|
||||
}
|
||||
|
||||
reset() {
|
||||
|
|
|
@ -0,0 +1,312 @@
|
|||
/* eslint-disable no-undef-init */
|
||||
const { IAM } = require('aws-sdk');
|
||||
const vaultclient = require('vaultclient');
|
||||
const fs = require('fs');
|
||||
const uuid = require('uuid');
|
||||
|
||||
const adminCredentials = {
|
||||
accessKey: 'D4IT2AWSB588GO5J9T00',
|
||||
secretKey: 'UEEu8tYlsOGGrgf4DAiSZD6apVNPUWqRiPG0nTB6',
|
||||
};
|
||||
|
||||
|
||||
const internalServiceAccountId = '000000000000';
|
||||
const internalServiceAccountName = 'scality-internal-services';
|
||||
const internalServiceAccountEmail = 'scality@internal';
|
||||
const internalServiceUserName = 'service-utapi-user';
|
||||
|
||||
class VaultClient {
|
||||
/**
|
||||
* Get endpoint information
|
||||
*
|
||||
* @return {object} Vault endpoint information
|
||||
*/
|
||||
static getEndpointInformation() {
|
||||
let host = '127.0.0.1';
|
||||
let port = 8600;
|
||||
let ca = undefined;
|
||||
let cert = undefined;
|
||||
let key = undefined;
|
||||
if (process.env.VAULT_ENDPOINT) {
|
||||
const res = /^https?:\/\/([^:]*)(:[0-9]+)?\/?$/.exec(
|
||||
process.env.VAULT_ENDPOINT,
|
||||
);
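// Illustration only (not in the original source): with VAULT_ENDPOINT=http://vault.local:8500,
// res[1] is 'vault.local' and res[2] is ':8500' (the port is parsed from it below).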
|
||||
// eslint-disable-next-line prefer-destructuring
|
||||
host = res[1];
|
||||
port = parseInt(res[2].substring(1), 10);
|
||||
const https = process.env.VAULT_ENDPOINT.startsWith('https://');
|
||||
if (https) {
|
||||
ca = fs.readFileSync(process.env.SSL_CA || '/conf/ca.crt',
|
||||
'ascii');
|
||||
cert = fs.readFileSync(process.env.SSL_CERT || '/conf/test.crt',
|
||||
'ascii');
|
||||
key = fs.readFileSync(process.env.SSL_KEY || '/conf/test.key',
|
||||
'ascii');
|
||||
}
|
||||
}
|
||||
return {
|
||||
host,
|
||||
port,
|
||||
ca,
|
||||
cert,
|
||||
key,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get an admin client
|
||||
*
|
||||
* @return {vaultclient.Client} Vault client for admin calls
|
||||
*/
|
||||
static getAdminClient() {
|
||||
const info = VaultClient.getEndpointInformation();
|
||||
const adminClient = new vaultclient.Client(info.host, info.port,
|
||||
info.ca !== undefined, undefined, undefined, info.ca, false,
|
||||
adminCredentials.accessKey, adminCredentials.secretKey);
|
||||
return adminClient;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get an s3 client
|
||||
*
|
||||
* @return {vaultclient.Client} Vault client for s3
|
||||
*/
|
||||
static getServiceClient() {
|
||||
const info = VaultClient.getEndpointInformation();
|
||||
const adminClient = new vaultclient.Client(info.host, info.port - 100,
|
||||
info.ca !== undefined, info.key, info.cert, info.ca);
|
||||
return adminClient;
|
||||
}
|
||||
|
||||
static getIAMClient(credentials) {
|
||||
const endpoint = process.env.VAULT_ENDPOINT || 'http://localhost:8600';
|
||||
const info = {
|
||||
endpoint,
|
||||
sslEnabled: false,
|
||||
region: 'us-east-1',
|
||||
apiVersion: '2010-05-08',
|
||||
signatureVersion: 'v4',
|
||||
accessKeyId: credentials.accessKey,
|
||||
secretAccessKey: credentials.secretKey,
|
||||
maxRetries: 0,
|
||||
};
|
||||
return new IAM(info);
|
||||
}
|
||||
|
||||
static async createAccount(name) {
|
||||
const client = VaultClient.getAdminClient();
|
||||
return new Promise((resolve, reject) =>
|
||||
client.createAccount(
|
||||
name,
|
||||
{ email: `${name}@example.com` },
|
||||
(err, res) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
resolve(res.account);
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
static async createAccountKeys(account) {
|
||||
const client = VaultClient.getAdminClient();
|
||||
return new Promise((resolve, reject) =>
|
||||
client.generateAccountAccessKey(
|
||||
account.name,
|
||||
(err, res) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
return resolve({
|
||||
accessKey: res.id,
|
||||
secretKey: res.value,
|
||||
});
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
static async createAccountAndKeys(name) {
|
||||
const account = await VaultClient.createAccount(name);
|
||||
const creds = await VaultClient.createAccountKeys(account);
|
||||
return {
|
||||
...account,
|
||||
...creds,
|
||||
};
|
||||
}
|
||||
|
||||
static async createUser(parentAccount, name) {
|
||||
const client = VaultClient.getIAMClient(parentAccount);
|
||||
const { User: user } = await client.createUser({ UserName: name }).promise();
|
||||
return {
|
||||
name,
|
||||
id: user.UserId,
|
||||
arn: user.Arn,
|
||||
account: user.Arn.split(':')[4],
|
||||
};
|
||||
}
|
||||
|
||||
static async createUserKeys(parentAccount, name) {
|
||||
const client = VaultClient.getIAMClient(parentAccount);
|
||||
const { AccessKey: creds } = await client.createAccessKey({ UserName: name }).promise();
|
||||
return {
|
||||
accessKey: creds.AccessKeyId,
|
||||
secretKey: creds.SecretAccessKey,
|
||||
};
|
||||
}
|
||||
|
||||
static async createUserAndKeys(parentAccount, name) {
|
||||
const user = await VaultClient.createUser(parentAccount, name);
|
||||
const creds = await VaultClient.createUserKeys(parentAccount, name);
|
||||
return {
|
||||
...user,
|
||||
...creds,
|
||||
};
|
||||
}
|
||||
|
||||
static templateUtapiPolicy(level, resource) {
|
||||
return JSON.stringify({
|
||||
Version: '2012-10-17',
|
||||
Statement: [
|
||||
{
|
||||
Sid: `utapiMetrics-${uuid.v4()}`.replace(/-/g, ''),
|
||||
Action: ['utapi:ListMetrics'],
|
||||
Effect: 'Allow',
|
||||
Resource: `arn:scality:utapi:::${level}/${resource}`,
|
||||
},
|
||||
],
|
||||
});
|
||||
}
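// For illustration (values taken from the createAndAttachUtapiPolicy call sites in these tests):
// level 'buckets' with resource '*' produces Resource 'arn:scality:utapi:::buckets/*'.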
|
||||
|
||||
static async createAndAttachUtapiPolicy(parentAccount, user, level, resource) {
|
||||
const client = VaultClient.getIAMClient(parentAccount);
|
||||
const PolicyDocument = VaultClient.templateUtapiPolicy(level, resource);
|
||||
const PolicyName = `utapi-test-policy-${uuid.v4()}`;
|
||||
const res = await client.createPolicy({ PolicyName, PolicyDocument }).promise();
|
||||
const { Arn: PolicyArn } = res.Policy;
|
||||
await client.attachUserPolicy({ PolicyArn, UserName: user.name }).promise();
|
||||
}
|
||||
|
||||
static async createInternalServiceAccount() {
|
||||
const client = VaultClient.getAdminClient();
|
||||
return new Promise((resolve, reject) =>
|
||||
client.createAccount(
|
||||
internalServiceAccountName,
|
||||
{
|
||||
email: internalServiceAccountEmail,
|
||||
externalAccountId: internalServiceAccountId,
|
||||
disableSeed: true,
|
||||
},
|
||||
(err, res) => {
|
||||
if (err) {
|
||||
return reject(err);
|
||||
}
|
||||
return resolve(res.account);
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
static async createInternalServiceAccountAndKeys() {
|
||||
const account = await VaultClient.createInternalServiceAccount();
|
||||
const creds = await VaultClient.createAccountKeys(account);
|
||||
return {
|
||||
...account,
|
||||
...creds,
|
||||
};
|
||||
}
|
||||
|
||||
static async getUserByName(parentAccount, name) {
|
||||
const client = VaultClient.getIAMClient(parentAccount);
|
||||
const { User: user } = await client.getUser({ UserName: name }).promise();
|
||||
return {
|
||||
name,
|
||||
id: user.UserId,
|
||||
arn: user.Arn,
|
||||
account: user.Arn.split(':')[4],
|
||||
};
|
||||
}
|
||||
|
||||
static async getAttachedPolicies(parentAccount, user) {
|
||||
const client = VaultClient.getIAMClient(parentAccount);
|
||||
const res = await client.listAttachedUserPolicies({ UserName: user.name }).promise();
|
||||
const { AttachedPolicies: attached } = res;
|
||||
const policies = await Promise.all(
|
||||
attached.map(
|
||||
({ PolicyArn }) => client.getPolicyVersion({ PolicyArn, VersionId: 'v1' })
|
||||
.promise()
|
||||
.then(({ PolicyVersion }) => ({
|
||||
arn: PolicyArn,
|
||||
document: JSON.parse(decodeURIComponent(PolicyVersion.Document)),
|
||||
})),
|
||||
),
|
||||
);
|
||||
return policies;
|
||||
}
|
||||
|
||||
static async getInternalServiceUserAndPolicies(parentAccount) {
|
||||
const user = await VaultClient.getUserByName(parentAccount, internalServiceUserName);
|
||||
const policies = await VaultClient.getAttachedPolicies(parentAccount, user);
|
||||
return {
|
||||
...user,
|
||||
policies,
|
||||
};
|
||||
}
|
||||
|
||||
static async getAccountUsers(parentAccount) {
|
||||
const client = VaultClient.getIAMClient(parentAccount);
|
||||
const { Users } = await client.listUsers({}).promise();
|
||||
|
||||
return Users.map(user => ({
|
||||
arn: user.Arn,
|
||||
id: user.UserId,
|
||||
name: user.UserName,
|
||||
}));
|
||||
}
|
||||
|
||||
static async detachUserPolicies(parentAccount, user) {
|
||||
const client = VaultClient.getIAMClient(parentAccount);
|
||||
const policies = await VaultClient.getAttachedPolicies(parentAccount, user);
|
||||
return Promise.all(
|
||||
policies.map(policy => client.detachUserPolicy({
|
||||
PolicyArn: policy.arn,
|
||||
UserName: user.name,
|
||||
}).promise().then(() => policy.arn)),
|
||||
);
|
||||
}
|
||||
|
||||
static async deleteAccount(account) {
|
||||
return new Promise(
|
||||
(resolve, reject) => VaultClient
|
||||
.getAdminClient()
|
||||
.deleteAccount(
|
||||
account.name,
|
||||
(err, res) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
resolve(res);
|
||||
},
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
static async cleanupUsers(parentAccount) {
|
||||
const client = VaultClient.getIAMClient(parentAccount);
|
||||
const users = await VaultClient.getAccountUsers(parentAccount);
|
||||
await Promise.all(
|
||||
users.map(async user => {
|
||||
const detached = await VaultClient.detachUserPolicies(parentAccount, user);
|
||||
await Promise.all(detached.map(PolicyArn => client.deletePolicy({ PolicyArn }).promise()));
|
||||
await client.deleteUser({ UserName: user.name }).promise();
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
static async cleanupAccountAndUsers(parentAccount) {
|
||||
await VaultClient.cleanupUsers(parentAccount);
|
||||
await VaultClient.deleteAccount(parentAccount);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
module.exports = VaultClient;
|
|
@ -40,15 +40,54 @@
|
|||
'utapi.checkpoint' 'checkpoint_class' STORE
|
||||
'utapi.repair.correction' 'correction_class' STORE
|
||||
|
||||
// will read the key `max_snapshot_size@utapi/createSnapshot` from warp 10 config
|
||||
'max_snapshot_size' 'P6H' MACROCONFIGDEFAULT DURATION 'max_snapshot_size' STORE
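// Example only (assumed Warp 10 configuration syntax): the P6H default above can be overridden
// with an entry such as:  max_snapshot_size@utapi/createSnapshot = P12H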
|
||||
|
||||
// Fetch latest master snapshot
|
||||
$read_token $master_snapshot_class $filterLabels $endTimestamp @utapi/fetchFirstRecordBefore
|
||||
|
||||
|
||||
FIRSTTICK
|
||||
// If we found a snapshot, increment its timestamp so we start at the tick immediately after
|
||||
<% DUP 0 > %>
|
||||
<% 1 + %> IFT
|
||||
'masterSnapshotTimestamp' STORE // Grab our ending timestamp from the last master snapshot (0 if no snapshots)
|
||||
|
||||
// If snapshot is longer than 6 hours change endTimestamp to be masterSnapshotTimestamp + 6 hours
|
||||
<% $endTimestamp $masterSnapshotTimestamp - $max_snapshot_size > %>
|
||||
<%
|
||||
// If there are no preceding snapshots
|
||||
<% $masterSnapshotTimestamp 0 == %>
|
||||
<%
|
||||
// Find the oldest checkpoint
|
||||
// 'No previous snapshots found, detecting start of checkpoints.' LOGMSG
|
||||
$auth_info ->JSON
|
||||
{ 'class' $checkpoint_class 'labels' $filterLabels } ->JSON
|
||||
@utapi/findOldestRecord 'oldestTimestamp' STORE
|
||||
// If no checkpoints are found
|
||||
<% $oldestTimestamp -1 == %>
|
||||
<%
|
||||
// 'No checkpoints found. Looking for corrections' LOGMSG
|
||||
$auth_info ->JSON
|
||||
{ 'class' $correction_class 'labels' $filterLabels } ->JSON
|
||||
@utapi/findOldestRecord 'oldestTimestamp' STORE
|
||||
<% $oldestTimestamp -1 == %>
|
||||
<%
|
||||
// 'No corrections or checkpoints found. Nothing to snapshot' LOGMSG
|
||||
STOP
|
||||
%> IFT
|
||||
%> IFT
|
||||
<% $oldestTimestamp $max_snapshot_size + $endTimestamp < %>
|
||||
<%
|
||||
$oldestTimestamp $max_snapshot_size + 'endTimestamp' STORE
|
||||
%> IFT
|
||||
%>
|
||||
<%
|
||||
$masterSnapshotTimestamp $max_snapshot_size + 'endTimestamp' STORE
|
||||
%> IFTE
|
||||
// 'Oversize snapshot detected using ' $endTimestamp TOSTRING + ' as end' + LOGMSG
|
||||
%> IFT
|
||||
|
||||
[
|
||||
"node"
|
||||
".app"
|
||||
|
@ -91,6 +130,8 @@
|
|||
$endTimestamp
|
||||
@utapi/fetchFirstRecordBefore
|
||||
VALUES 0 GET @utapi/decodeRecord 'record' STORE
|
||||
<% $record 'ops' GET ISNULL %>
|
||||
<% $record {} 'ops' PUT DROP %> IFT
|
||||
$fieldSnapshots $record $value PUT DROP
|
||||
%> FOREACH
|
||||
$previousSnapshots $fieldSnapshots $field PUT DROP
|
||||
|
@ -163,9 +204,6 @@
|
|||
$labelSnapshots $created $labelValue PUT DROP
|
||||
%> FOREACH
|
||||
|
||||
// $createdSnapshots ->JSON LOGMSG
|
||||
|
||||
// $endTimestamp TOSTRING LOGMSG
|
||||
// Fetch all checkpoints since last master snapshot til passed time
|
||||
{
|
||||
'token' $read_token
|
||||
|
@ -236,7 +274,6 @@
|
|||
$labelSnapshots $created $labelValue PUT DROP
|
||||
%> FOREACH
|
||||
|
||||
// $createdSnapshots ->JSON LOGMSG
|
||||
0 'snapshots' STORE
|
||||
// For each of our indexed fields
|
||||
$createdSnapshots KEYLIST
|
||||
|
|
324 yarn.lock
|
@ -380,6 +380,11 @@
|
|||
resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.5.tgz#b14efa8852b7768d898906613c23f688713e02cd"
|
||||
integrity sha512-Q1y515GcOdTHgagaVFhHnIFQ38ygs/kmxdNpvpou+raI9UO3YZcHDngBSYKQklcKlvA7iuQlmIKbzvmxcOE9CQ==
|
||||
|
||||
"@types/emscripten@^1.38.0":
|
||||
version "1.39.6"
|
||||
resolved "https://registry.yarnpkg.com/@types/emscripten/-/emscripten-1.39.6.tgz#698b90fe60d44acf93c31064218fbea93fbfd85a"
|
||||
integrity sha512-H90aoynNhhkQP6DRweEjJp5vfUVdIj7tdPLsu7pq89vODD/lcugKfZOsfgwpvM6XUewEp2N5dCg1Uf3Qe55Dcg==
|
||||
|
||||
"@types/graphlib@^2.1.7":
|
||||
version "2.1.7"
|
||||
resolved "https://registry.yarnpkg.com/@types/graphlib/-/graphlib-2.1.7.tgz#e6a47a4f43511f5bad30058a669ce5ce93bfd823"
|
||||
|
@ -439,11 +444,43 @@
|
|||
resolved "https://registry.yarnpkg.com/@types/semver/-/semver-5.5.0.tgz#146c2a29ee7d3bae4bf2fcb274636e264c813c45"
|
||||
integrity sha512-41qEJgBH/TWgo5NFSvBCJ1qkoi3Q6ONSF2avrHq1LVEZfYpdHmj0y9SuTK+u9ZhG1sYQKBL1AWXKyLWP4RaUoQ==
|
||||
|
||||
"@yarnpkg/fslib@2.4.0", "@yarnpkg/fslib@^2.1.0":
|
||||
version "2.4.0"
|
||||
resolved "https://registry.yarnpkg.com/@yarnpkg/fslib/-/fslib-2.4.0.tgz#a265b737cd089ef293ad964e06c143f5efd411a9"
|
||||
integrity sha512-CwffYY9owtl3uImNOn1K4jl5iIb/L16a9UZ9Q3lkBARk6tlUsPrNFX00eoUlFcLn49TTfd3zdN6higloGCyncw==
|
||||
dependencies:
|
||||
"@yarnpkg/libzip" "^2.2.1"
|
||||
tslib "^1.13.0"
|
||||
|
||||
"@yarnpkg/json-proxy@2.1.0":
|
||||
version "2.1.0"
|
||||
resolved "https://registry.yarnpkg.com/@yarnpkg/json-proxy/-/json-proxy-2.1.0.tgz#362a161678cd7dda74b47b4fc848a2f1730d16cd"
|
||||
integrity sha512-rOgCg2DkyviLgr80mUMTt9vzdf5RGOujQB26yPiXjlz4WNePLBshKlTNG9rKSoKQSOYEQcw6cUmosfOKDatrCw==
|
||||
dependencies:
|
||||
"@yarnpkg/fslib" "^2.1.0"
|
||||
tslib "^1.13.0"
|
||||
|
||||
"@yarnpkg/libzip@2.2.1", "@yarnpkg/libzip@^2.2.1":
|
||||
version "2.2.1"
|
||||
resolved "https://registry.yarnpkg.com/@yarnpkg/libzip/-/libzip-2.2.1.tgz#61c9b8b2499ee6bd9c4fcbf8248f68e07bd89948"
|
||||
integrity sha512-AYDJXrkzayoDd3ZlVgFJ+LyDX+Zj/cki3vxIpcYxejtgkl3aquVWOxlC0DD9WboBWsJFIP1MjrUbchLyh++/7A==
|
||||
dependencies:
|
||||
"@types/emscripten" "^1.38.0"
|
||||
tslib "^1.13.0"
|
||||
|
||||
"@yarnpkg/lockfile@^1.1.0":
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz#e77a97fbd345b76d83245edcd17d393b1b41fb31"
|
||||
integrity sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ==
|
||||
|
||||
"@yarnpkg/parsers@2.3.0":
|
||||
version "2.3.0"
|
||||
resolved "https://registry.yarnpkg.com/@yarnpkg/parsers/-/parsers-2.3.0.tgz#7b9564c6df02f4921d5cfe8287c4b648e93ea84b"
|
||||
integrity sha512-qgz0QUgOvnhtF92kaluIhIIKBUHlYlHUBQxqh5v9+sxEQvUeF6G6PKiFlzo3E6O99XwvNEGpVu1xZPoSGyGscQ==
|
||||
dependencies:
|
||||
js-yaml "^3.10.0"
|
||||
tslib "^1.13.0"
|
||||
|
||||
JSONStream@^1.0.0:
|
||||
version "1.3.5"
|
||||
resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.5.tgz#3208c1f08d3a4d99261ab64f92302bc15e111ca0"
|
||||
|
@ -552,6 +589,14 @@ agentkeepalive@^4.1.3:
|
|||
depd "^1.1.2"
|
||||
humanize-ms "^1.2.1"
|
||||
|
||||
ajv@4.10.0:
|
||||
version "4.10.0"
|
||||
resolved "https://registry.yarnpkg.com/ajv/-/ajv-4.10.0.tgz#7ae6169180eb199192a8b9a19fd0f47fc9ac8764"
|
||||
integrity sha1-euYWkYDrGZGSqLmhn9D0f8msh2Q=
|
||||
dependencies:
|
||||
co "^4.6.0"
|
||||
json-stable-stringify "^1.0.1"
|
||||
|
||||
ajv@6.12.2:
|
||||
version "6.12.2"
|
||||
resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.2.tgz#c629c5eced17baf314437918d2da88c99d5958cd"
|
||||
|
@ -665,31 +710,6 @@ arraybuffer.slice@~0.0.7:
|
|||
resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz#3bbc4275dd584cc1b10809b89d4e8b63a69e7675"
|
||||
integrity sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog==
|
||||
|
||||
arsenal@scality/Arsenal#580e25a:
|
||||
version "7.5.0"
|
||||
resolved "https://codeload.github.com/scality/Arsenal/tar.gz/580e25a9e84acf2a2a8dec6c83f69f862804d85f"
|
||||
dependencies:
|
||||
"@hapi/joi" "^15.1.0"
|
||||
JSONStream "^1.0.0"
|
||||
ajv "6.12.2"
|
||||
async "~2.1.5"
|
||||
debug "~2.3.3"
|
||||
diskusage "^1.1.1"
|
||||
ioredis "4.9.5"
|
||||
ipaddr.js "1.9.1"
|
||||
level "~5.0.1"
|
||||
level-sublevel "~6.6.5"
|
||||
node-forge "^0.7.1"
|
||||
simple-glob "^0.2"
|
||||
socket.io "~2.3.0"
|
||||
socket.io-client "~2.3.0"
|
||||
utf8 "2.1.2"
|
||||
uuid "^3.0.1"
|
||||
werelogs scality/werelogs#0ff7ec82
|
||||
xml2js "~0.4.23"
|
||||
optionalDependencies:
|
||||
ioctl "2.0.0"
|
||||
|
||||
arsenal@scality/Arsenal#65966f5:
|
||||
version "7.7.0"
|
||||
resolved "https://codeload.github.com/scality/Arsenal/tar.gz/65966f5ddf93b048906d14a8c26056abfd4c22ba"
|
||||
|
@ -716,13 +736,12 @@ arsenal@scality/Arsenal#65966f5:
|
|||
optionalDependencies:
|
||||
ioctl "2.0.0"
|
||||
|
||||
arsenal@scality/Arsenal#8ed8478:
|
||||
version "8.2.1"
|
||||
resolved "https://codeload.github.com/scality/Arsenal/tar.gz/8ed84786fce31f603b1e8cd641b3b44b8f715b0b"
|
||||
arsenal@scality/Arsenal#c57cde8:
|
||||
version "8.1.4"
|
||||
resolved "https://codeload.github.com/scality/Arsenal/tar.gz/c57cde88bb04fe9803ec08c3a883f6eb986e4149"
|
||||
dependencies:
|
||||
"@hapi/joi" "^15.1.0"
|
||||
JSONStream "^1.0.0"
|
||||
ajv "6.12.2"
|
||||
ajv "4.10.0"
|
||||
async "~2.6.1"
|
||||
aws-sdk "2.80.0"
|
||||
azure-storage "^2.1.0"
|
||||
|
@ -734,22 +753,48 @@ arsenal@scality/Arsenal#8ed8478:
|
|||
hdclient scality/hdclient#5145e04e5ed33e85106765b1caa90cd245ef482b
|
||||
https-proxy-agent "^2.2.0"
|
||||
ioredis "4.9.5"
|
||||
ipaddr.js "1.9.1"
|
||||
ipaddr.js "1.8.1"
|
||||
joi "^14.3.0"
|
||||
level "~5.0.1"
|
||||
level-sublevel "~6.6.5"
|
||||
mongodb "^3.0.1"
|
||||
node-forge "^0.7.1"
|
||||
prom-client "10.2.3"
|
||||
simple-glob "^0.2.0"
|
||||
socket.io "~2.2.0"
|
||||
socket.io-client "~2.2.0"
|
||||
sproxydclient "github:scality/sproxydclient#a6ec980"
|
||||
utf8 "3.0.0"
|
||||
uuid "^3.0.1"
|
||||
werelogs scality/werelogs#0ff7ec82
|
||||
xml2js "~0.4.16"
|
||||
optionalDependencies:
|
||||
ioctl "2.0.1"
|
||||
|
||||
arsenal@scality/Arsenal#ed446c5:
|
||||
version "7.7.0"
|
||||
resolved "https://codeload.github.com/scality/Arsenal/tar.gz/ed446c569ce11cd8c109c43c633de2dcd201f8a1"
|
||||
dependencies:
|
||||
"@hapi/joi" "^15.1.0"
|
||||
JSONStream "^1.0.0"
|
||||
ajv "6.12.2"
|
||||
async "~2.1.5"
|
||||
debug "~2.6.9"
|
||||
diskusage "^1.1.1"
|
||||
ioredis "4.9.5"
|
||||
ipaddr.js "1.9.1"
|
||||
level "~5.0.1"
|
||||
level-sublevel "~6.6.5"
|
||||
node-forge "^0.7.1"
|
||||
simple-glob "^0.2"
|
||||
socket.io "~2.3.0"
|
||||
socket.io-client "~2.3.0"
|
||||
sproxydclient "github:scality/sproxydclient#30e7115"
|
||||
utf8 "3.0.0"
|
||||
utf8 "2.1.2"
|
||||
uuid "^3.0.1"
|
||||
werelogs scality/werelogs#0ff7ec82
|
||||
xml2js "~0.4.23"
|
||||
optionalDependencies:
|
||||
ioctl "2.0.1"
|
||||
ioctl "2.0.0"
|
||||
|
||||
asap@~2.0.3:
|
||||
version "2.0.6"
|
||||
|
@ -834,6 +879,21 @@ aws-sdk@2.80.0:
|
|||
xml2js "0.4.17"
|
||||
xmlbuilder "4.2.1"
|
||||
|
||||
aws-sdk@^2.1005.0:
|
||||
version "2.1035.0"
|
||||
resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1035.0.tgz#89a34c5b1e76e8304201036bf5258bceeebf4137"
|
||||
integrity sha512-BjSGGZIQE/SCLDgj2T4AhtBG4A4NgXhV/Z/I/E7Mst/RpOepTqZGznUbgXTvO+Z3gKqx33jJa6mS7ZxStCb/Wg==
|
||||
dependencies:
|
||||
buffer "4.9.2"
|
||||
events "1.1.1"
|
||||
ieee754 "1.1.13"
|
||||
jmespath "0.15.0"
|
||||
querystring "0.2.0"
|
||||
sax "1.2.1"
|
||||
url "0.10.3"
|
||||
uuid "3.3.2"
|
||||
xml2js "0.4.19"
|
||||
|
||||
aws-sign2@~0.7.0:
|
||||
version "0.7.0"
|
||||
resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8"
|
||||
|
@ -891,6 +951,11 @@ base64-js@^1.0.2:
|
|||
resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.1.tgz#58ece8cb75dd07e71ed08c736abc5fac4dbf8df1"
|
||||
integrity sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g==
|
||||
|
||||
base64id@1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/base64id/-/base64id-1.0.0.tgz#47688cb99bb6804f0e06d3e763b1c32e57d8e6b6"
|
||||
integrity sha1-R2iMuZu2gE8OBtPnY7HDLlfY5rY=
|
||||
|
||||
base64id@2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/base64id/-/base64id-2.0.0.tgz#2770ac6bc47d312af97a8bf9a634342e0cd25cb6"
|
||||
|
@ -1036,7 +1101,7 @@ bucketclient@scality/bucketclient:
|
|||
resolved "https://codeload.github.com/scality/bucketclient/tar.gz/97fe75a58b22e256856ea50fad72ca22a94d7096"
|
||||
dependencies:
|
||||
agentkeepalive "^4.1.3"
|
||||
arsenal scality/Arsenal#8ed8478
|
||||
arsenal scality/Arsenal#c57cde8
|
||||
werelogs scality/werelogs#351a2a3
|
||||
|
||||
buffer-equal-constant-time@1.0.1:
|
||||
|
@ -1058,6 +1123,15 @@ buffer@4.9.1:
|
|||
ieee754 "^1.1.4"
|
||||
isarray "^1.0.0"
|
||||
|
||||
buffer@4.9.2:
|
||||
version "4.9.2"
|
||||
resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8"
|
||||
integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==
|
||||
dependencies:
|
||||
base64-js "^1.0.2"
|
||||
ieee754 "^1.1.4"
|
||||
isarray "^1.0.0"
|
||||
|
||||
buffer@^5.1.0, buffer@^5.5.0, buffer@^5.6.0:
|
||||
version "5.6.0"
|
||||
resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.6.0.tgz#a31749dc7d81d84db08abf937b6b8c4033f62786"
|
||||
|
@ -1554,13 +1628,6 @@ debug@^4.3.1:
|
|||
dependencies:
|
||||
ms "2.1.2"
|
||||
|
||||
debug@~2.3.3:
|
||||
version "2.3.3"
|
||||
resolved "https://registry.yarnpkg.com/debug/-/debug-2.3.3.tgz#40c453e67e6e13c901ddec317af8986cda9eff8c"
|
||||
integrity sha1-QMRT5n5uE8kB3ewxeviYbNqe/4w=
|
||||
dependencies:
|
||||
ms "0.7.2"
|
||||
|
||||
debug@~4.1.0:
|
||||
version "4.1.1"
|
||||
resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791"
|
||||
|
@ -1812,6 +1879,23 @@ end-of-stream@^1.0.0, end-of-stream@^1.1.0, end-of-stream@^1.4.1:
|
|||
dependencies:
|
||||
once "^1.4.0"
|
||||
|
||||
engine.io-client@~3.3.1:
|
||||
version "3.3.3"
|
||||
resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-3.3.3.tgz#aeb45695ced81b787a8a10c92b0bc226b1cb3c53"
|
||||
integrity sha512-PXIgpzb1brtBzh8Q6vCjzCMeu4nfEPmaDm+L3Qb2sVHwLkxC1qRiBMSjOB0NJNjZ0hbPNUKQa+s8J2XxLOIEeQ==
|
||||
dependencies:
|
||||
component-emitter "1.2.1"
|
||||
component-inherit "0.0.3"
|
||||
debug "~3.1.0"
|
||||
engine.io-parser "~2.1.1"
|
||||
has-cors "1.1.0"
|
||||
indexof "0.0.1"
|
||||
parseqs "0.0.5"
|
||||
parseuri "0.0.5"
|
||||
ws "~6.1.0"
|
||||
xmlhttprequest-ssl "~1.6.3"
|
||||
yeast "0.1.2"
|
||||
|
||||
engine.io-client@~3.4.0:
|
||||
version "3.4.4"
|
||||
resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-3.4.4.tgz#77d8003f502b0782dd792b073a4d2cf7ca5ab967"
|
||||
|
@ -1829,6 +1913,17 @@ engine.io-client@~3.4.0:
|
|||
xmlhttprequest-ssl "~1.5.4"
|
||||
yeast "0.1.2"
|
||||
|
||||
engine.io-parser@~2.1.0, engine.io-parser@~2.1.1:
|
||||
version "2.1.3"
|
||||
resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-2.1.3.tgz#757ab970fbf2dfb32c7b74b033216d5739ef79a6"
|
||||
integrity sha512-6HXPre2O4Houl7c4g7Ic/XzPnHBvaEmN90vtRO9uLmwtRqQmTOw0QMevL1TOfL2Cpu1VzsaTmMotQgMdkzGkVA==
|
||||
dependencies:
|
||||
after "0.8.2"
|
||||
arraybuffer.slice "~0.0.7"
|
||||
base64-arraybuffer "0.1.5"
|
||||
blob "0.0.5"
|
||||
has-binary2 "~1.0.2"
|
||||
|
||||
engine.io-parser@~2.2.0:
|
||||
version "2.2.1"
|
||||
resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-2.2.1.tgz#57ce5611d9370ee94f99641b589f94c97e4f5da7"
|
||||
|
@ -1840,6 +1935,18 @@ engine.io-parser@~2.2.0:
|
|||
blob "0.0.5"
|
||||
has-binary2 "~1.0.2"
|
||||
|
||||
engine.io@~3.3.1:
|
||||
version "3.3.2"
|
||||
resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-3.3.2.tgz#18cbc8b6f36e9461c5c0f81df2b830de16058a59"
|
||||
integrity sha512-AsaA9KG7cWPXWHp5FvHdDWY3AMWeZ8x+2pUVLcn71qE5AtAzgGbxuclOytygskw8XGmiQafTmnI9Bix3uihu2w==
|
||||
dependencies:
|
||||
accepts "~1.3.4"
|
||||
base64id "1.0.0"
|
||||
cookie "0.3.1"
|
||||
debug "~3.1.0"
|
||||
engine.io-parser "~2.1.0"
|
||||
ws "~6.1.0"
|
||||
|
||||
engine.io@~3.4.0:
|
||||
version "3.4.2"
|
||||
resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-3.4.2.tgz#8fc84ee00388e3e228645e0a7d3dfaeed5bd122c"
|
||||
|
@ -2129,6 +2236,11 @@ event-loop-spinner@^2.0.0:
|
|||
dependencies:
|
||||
tslib "^1.10.0"
|
||||
|
||||
events@1.1.1:
|
||||
version "1.1.1"
|
||||
resolved "https://registry.yarnpkg.com/events/-/events-1.1.1.tgz#9ebdb7635ad099c70dcc4c2a1f5004288e8bd924"
|
||||
integrity sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=
|
||||
|
||||
execa@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8"
|
||||
|
@ -2619,6 +2731,11 @@ he@1.1.1:
|
|||
resolved "https://registry.yarnpkg.com/he/-/he-1.1.1.tgz#93410fd21b009735151f8868c2f271f3427e23fd"
|
||||
integrity sha1-k0EP0hsAlzUVH4howvJx80J+I/0=
|
||||
|
||||
hoek@6.x.x:
|
||||
version "6.1.3"
|
||||
resolved "https://registry.yarnpkg.com/hoek/-/hoek-6.1.3.tgz#73b7d33952e01fe27a38b0457294b79dd8da242c"
|
||||
integrity sha512-YXXAAhmF9zpQbC7LEcREFtXfGq5K1fmd+4PHkBq8NUqmzW3G+Dq10bI/i0KucLRwss3YYFQ0fSfoxBZYiGUqtQ==
|
||||
|
||||
hosted-git-info@^2.1.4, hosted-git-info@^2.7.1:
|
||||
version "2.8.8"
|
||||
resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.8.tgz#7539bd4bc1e0e0a895815a2e0262420b12858488"
|
||||
|
@ -2713,7 +2830,7 @@ iconv-lite@0.4.24, iconv-lite@^0.4.24, iconv-lite@^0.4.4:
|
|||
dependencies:
|
||||
safer-buffer ">= 2.1.2 < 3"
|
||||
|
||||
ieee754@^1.1.4:
|
||||
ieee754@1.1.13, ieee754@^1.1.4:
|
||||
version "1.1.13"
|
||||
resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84"
|
||||
integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==
|
||||
|
@ -2885,6 +3002,11 @@ ip@1.1.5, ip@^1.1.5:
|
|||
resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a"
|
||||
integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo=
|
||||
|
||||
ipaddr.js@1.8.1:
|
||||
version "1.8.1"
|
||||
resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.8.1.tgz#fa4b79fa47fd3def5e3b159825161c0a519c9427"
|
||||
integrity sha1-+kt5+kf9Pe9eOxWYJRYcClGclCc=
|
||||
|
||||
ipaddr.js@1.9.1:
|
||||
version "1.9.1"
|
||||
resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3"
|
||||
|
@ -3095,6 +3217,13 @@ isarray@^1.0.0, isarray@~1.0.0:
|
|||
resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
|
||||
integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=
|
||||
|
||||
isemail@3.x.x:
|
||||
version "3.2.0"
|
||||
resolved "https://registry.yarnpkg.com/isemail/-/isemail-3.2.0.tgz#59310a021931a9fb06bbb51e155ce0b3f236832c"
|
||||
integrity sha512-zKqkK+O+dGqevc93KNsbZ/TqTUFd46MwWjYOoMrjIMZ51eU7DtQG3Wmd9SQQT7i7RVnuTPEiYEWHU3MSbxC1Tg==
|
||||
dependencies:
|
||||
punycode "2.x.x"
|
||||
|
||||
isexe@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
|
||||
|
@ -3110,11 +3239,28 @@ jmespath@0.15.0:
|
|||
resolved "https://registry.yarnpkg.com/jmespath/-/jmespath-0.15.0.tgz#a3f222a9aae9f966f5d27c796510e28091764217"
|
||||
integrity sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc=
|
||||
|
||||
joi@^14.3.0:
|
||||
version "14.3.1"
|
||||
resolved "https://registry.yarnpkg.com/joi/-/joi-14.3.1.tgz#164a262ec0b855466e0c35eea2a885ae8b6c703c"
|
||||
integrity sha512-LQDdM+pkOrpAn4Lp+neNIFV3axv1Vna3j38bisbQhETPMANYRbFJFUyOZcOClYvM/hppMhGWuKSFEK9vjrB+bQ==
|
||||
dependencies:
|
||||
hoek "6.x.x"
|
||||
isemail "3.x.x"
|
||||
topo "3.x.x"
|
||||
|
||||
js-tokens@^4.0.0:
|
||||
version "4.0.0"
|
||||
resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
|
||||
integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==
|
||||
|
||||
js-yaml@^3.10.0:
|
||||
version "3.14.1"
|
||||
resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537"
|
||||
integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==
|
||||
dependencies:
|
||||
argparse "^1.0.7"
|
||||
esprima "^4.0.0"
|
||||
|
||||
js-yaml@^3.13.0, js-yaml@^3.13.1, js-yaml@^3.14.0:
|
||||
version "3.14.0"
|
||||
resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.0.tgz#a7a34170f26a21bb162424d8adacb4113a69e482"
|
||||
|
@ -3192,6 +3338,13 @@ json-stable-stringify-without-jsonify@^1.0.1:
|
|||
resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651"
|
||||
integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=
|
||||
|
||||
json-stable-stringify@^1.0.1:
|
||||
version "1.0.1"
|
||||
resolved "https://registry.yarnpkg.com/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz#9a759d39c5f2ff503fd5300646ed445f88c4f9af"
|
||||
integrity sha1-mnWdOcXy/1A/1TAGRu1EX4jE+a8=
|
||||
dependencies:
|
||||
jsonify "~0.0.0"
|
||||
|
||||
json-stringify-safe@~5.0.1:
|
||||
version "5.0.1"
|
||||
resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb"
|
||||
|
@ -3202,6 +3355,11 @@ json3@3.3.2:
|
|||
resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.2.tgz#3c0434743df93e2f5c42aee7b19bcb483575f4e1"
|
||||
integrity sha1-PAQ0dD35Pi9cQq7nsZvLSDV19OE=
|
||||
|
||||
jsonify@~0.0.0:
|
||||
version "0.0.0"
|
||||
resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73"
|
||||
integrity sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM=
|
||||
|
||||
jsonparse@^1.2.0:
|
||||
version "1.3.1"
|
||||
resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-1.3.1.tgz#3f4dae4a91fac315f71062f8521cc239f1366280"
|
||||
|
@ -3953,11 +4111,6 @@ mpath@~0.5.0:
|
|||
resolved "https://registry.yarnpkg.com/mpath/-/mpath-0.5.2.tgz#b1eac586dffb5175d2f51ca9aacba35d9940dd41"
|
||||
integrity sha512-NOeCoW6AYc3hLi30npe7uzbD9b4FQZKH40YKABUCCvaKKL5agj6YzvHoNx8jQpDMNPgIa5bvSZQbQpWBAVD0Kw==
|
||||
|
||||
ms@0.7.2:
|
||||
version "0.7.2"
|
||||
resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.2.tgz#ae25cf2512b3885a1d95d7f037868d8431124765"
|
||||
integrity sha1-riXPJRKziFodldfwN4aNhDESR2U=
|
||||
|
||||
ms@2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
|
||||
|
@ -4551,6 +4704,13 @@ prom-client@10.2.3:
|
|||
dependencies:
|
||||
tdigest "^0.1.1"
|
||||
|
||||
prom-client@^13.1.0:
|
||||
version "13.1.0"
|
||||
resolved "https://registry.yarnpkg.com/prom-client/-/prom-client-13.1.0.tgz#1185caffd8691e28d32e373972e662964e3dba45"
|
||||
integrity sha512-jT9VccZCWrJWXdyEtQddCDszYsiuWj5T0ekrPszi/WEegj3IZy6Mm09iOOVM86A4IKMWq8hZkT2dD9MaSe+sng==
|
||||
dependencies:
|
||||
tdigest "^0.1.1"
|
||||
|
||||
promise-fs@^2.1.1:
|
||||
version "2.1.1"
|
||||
resolved "https://registry.yarnpkg.com/promise-fs/-/promise-fs-2.1.1.tgz#0b725a592c165ff16157d1f13640ba390637e557"
|
||||
|
@ -4714,7 +4874,7 @@ punycode@1.3.2:
|
|||
resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d"
|
||||
integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=
|
||||
|
||||
punycode@^2.1.0, punycode@^2.1.1:
|
||||
punycode@2.x.x, punycode@^2.1.0, punycode@^2.1.1:
|
||||
version "2.1.1"
|
||||
resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec"
|
||||
integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==
|
||||
|
@ -5523,6 +5683,26 @@ socket.io-adapter@~1.1.0:
|
|||
resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-1.1.2.tgz#ab3f0d6f66b8fc7fca3959ab5991f82221789be9"
|
||||
integrity sha512-WzZRUj1kUjrTIrUKpZLEzFZ1OLj5FwLlAFQs9kuZJzJi5DKdU7FsWc36SNmA8iDOtwBQyT8FkrriRM8vXLYz8g==
|
||||
|
||||
socket.io-client@2.2.0, socket.io-client@~2.2.0:
|
||||
version "2.2.0"
|
||||
resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-2.2.0.tgz#84e73ee3c43d5020ccc1a258faeeb9aec2723af7"
|
||||
integrity sha512-56ZrkTDbdTLmBIyfFYesgOxsjcLnwAKoN4CiPyTVkMQj3zTUh0QAx3GbvIvLpFEOvQWu92yyWICxB0u7wkVbYA==
|
||||
dependencies:
|
||||
backo2 "1.0.2"
|
||||
base64-arraybuffer "0.1.5"
|
||||
component-bind "1.0.0"
|
||||
component-emitter "1.2.1"
|
||||
debug "~3.1.0"
|
||||
engine.io-client "~3.3.1"
|
||||
has-binary2 "~1.0.2"
|
||||
has-cors "1.1.0"
|
||||
indexof "0.0.1"
|
||||
object-component "0.0.3"
|
||||
parseqs "0.0.5"
|
||||
parseuri "0.0.5"
|
||||
socket.io-parser "~3.3.0"
|
||||
to-array "0.1.4"
|
||||
|
||||
socket.io-client@2.3.0:
|
||||
version "2.3.0"
|
||||
resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-2.3.0.tgz#14d5ba2e00b9bcd145ae443ab96b3f86cbcc1bb4"
|
||||
|
@ -5578,6 +5758,18 @@ socket.io-parser@~3.4.0:
|
|||
debug "~4.1.0"
|
||||
isarray "2.0.1"
|
||||
|
||||
socket.io@~2.2.0:
|
||||
version "2.2.0"
|
||||
resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-2.2.0.tgz#f0f633161ef6712c972b307598ecd08c9b1b4d5b"
|
||||
integrity sha512-wxXrIuZ8AILcn+f1B4ez4hJTPG24iNgxBBDaJfT6MsyOhVYiTXWexGoPkd87ktJG8kQEcL/NBvRi64+9k4Kc0w==
|
||||
dependencies:
|
||||
debug "~4.1.0"
|
||||
engine.io "~3.3.1"
|
||||
has-binary2 "~1.0.2"
|
||||
socket.io-adapter "~1.1.0"
|
||||
socket.io-client "2.2.0"
|
||||
socket.io-parser "~3.3.0"
|
||||
|
||||
socket.io@~2.3.0:
|
||||
version "2.3.0"
|
||||
resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-2.3.0.tgz#cd762ed6a4faeca59bc1f3e243c0969311eb73fb"
|
||||
|
@ -5667,12 +5859,11 @@ sprintf-js@~1.0.2:
|
|||
resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
|
||||
integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=
|
||||
|
||||
"sproxydclient@github:scality/sproxydclient#30e7115":
|
||||
version "8.0.2"
|
||||
resolved "https://codeload.github.com/scality/sproxydclient/tar.gz/30e7115668bc7e10b4ec3cfdbaa7a124cdc21cc5"
|
||||
"sproxydclient@github:scality/sproxydclient#a6ec980":
|
||||
version "7.4.0"
|
||||
resolved "https://codeload.github.com/scality/sproxydclient/tar.gz/a6ec98079fcbfde113de3f3afdcb57835d2ac55f"
|
||||
dependencies:
|
||||
async "^3.1.0"
|
||||
werelogs scality/werelogs#351a2a3
|
||||
werelogs scality/werelogs#0ff7ec82
|
||||
|
||||
ssh2-streams@~0.4.10:
|
||||
version "0.4.10"
|
||||
|
@ -6016,6 +6207,13 @@ toml@^3.0.0:
|
|||
resolved "https://registry.yarnpkg.com/toml/-/toml-3.0.0.tgz#342160f1af1904ec9d204d03a5d61222d762c5ee"
|
||||
integrity sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w==
|
||||
|
||||
topo@3.x.x:
|
||||
version "3.0.3"
|
||||
resolved "https://registry.yarnpkg.com/topo/-/topo-3.0.3.tgz#d5a67fb2e69307ebeeb08402ec2a2a6f5f7ad95c"
|
||||
integrity sha512-IgpPtvD4kjrJ7CRA3ov2FhWQADwv+Tdqbsf1ZnPUSAtCJ9e1Z44MmoSGDXGk4IppoZA7jd/QRkNddlLJWlUZsQ==
|
||||
dependencies:
|
||||
hoek "6.x.x"
|
||||
|
||||
touch@^3.1.0:
|
||||
version "3.1.0"
|
||||
resolved "https://registry.yarnpkg.com/touch/-/touch-3.1.0.tgz#fe365f5f75ec9ed4e56825e0bb76d24ab74af83b"
|
||||
|
@ -6241,6 +6439,11 @@ uuid@3.0.1:
|
|||
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.0.1.tgz#6544bba2dfda8c1cf17e629a3a305e2bb1fee6c1"
|
||||
integrity sha1-ZUS7ot/ajBzxfmKaOjBeK7H+5sE=
|
||||
|
||||
uuid@3.3.2:
|
||||
version "3.3.2"
|
||||
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131"
|
||||
integrity sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==
|
||||
|
||||
uuid@^3.0.0, uuid@^3.0.1, uuid@^3.3.2:
|
||||
version "3.4.0"
|
||||
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee"
|
||||
|
@ -6284,12 +6487,12 @@ vary@~1.1.2:
|
|||
resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc"
|
||||
integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=
|
||||
|
||||
vaultclient@scality/vaultclient#ff9e92f:
|
||||
vaultclient@scality/vaultclient#8c4d210:
|
||||
version "7.5.1"
|
||||
resolved "https://codeload.github.com/scality/vaultclient/tar.gz/ff9e92fd8e35c16bbc20f5e4ee0531d8bffedbf2"
|
||||
resolved "https://codeload.github.com/scality/vaultclient/tar.gz/8c4d2109709a3dc7940fb6018aa0be1f33e4baac"
|
||||
dependencies:
|
||||
agentkeepalive "^4.1.3"
|
||||
arsenal scality/Arsenal#580e25a
|
||||
arsenal scality/Arsenal#ed446c5
|
||||
commander "2.20.0"
|
||||
werelogs scality/werelogs#4e0d97c
|
||||
xml2js "0.4.19"
|
||||
|
@ -6476,7 +6679,7 @@ xml2js@0.4.19:
|
|||
sax ">=0.6.0"
|
||||
xmlbuilder "~9.0.1"
|
||||
|
||||
xml2js@0.4.23, xml2js@^0.4.17, xml2js@~0.4.23:
|
||||
xml2js@0.4.23, xml2js@^0.4.17, xml2js@~0.4.16, xml2js@~0.4.23:
|
||||
version "0.4.23"
|
||||
resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.23.tgz#a0c69516752421eb2ac758ee4d4ccf58843eac66"
|
||||
integrity sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==
|
||||
|
@ -6506,6 +6709,11 @@ xmlhttprequest-ssl@~1.5.4:
|
|||
resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.5.tgz#c2876b06168aadc40e57d97e81191ac8f4398b3e"
|
||||
integrity sha1-wodrBhaKrcQOV9l+gRkayPQ5iz4=
|
||||
|
||||
xmlhttprequest-ssl@~1.6.3:
|
||||
version "1.6.3"
|
||||
resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.6.3.tgz#03b713873b01659dfa2c1c5d056065b27ddc2de6"
|
||||
integrity sha512-3XfeQE/wNkvrIktn2Kf0869fC0BN6UpydVasGIeSm2B1Llihf7/0UfZM+eCkOw3P7bP4+qPgqhm7ZoxuJtFU0Q==
|
||||
|
||||
xregexp@2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/xregexp/-/xregexp-2.0.0.tgz#52a63e56ca0b84a7f3a5f3d61872f126ad7a5943"
|
||||
|
|