Compare commits

...

10 Commits

Author SHA1 Message Date
Taylor McKinnon ec87bf06a6 ft(UTAPI-51): regen lockfile 2021-11-19 11:52:43 -08:00
Taylor McKinnon 6d66d17fba bf(UTAPI-49): Fix event filter config schema
(cherry picked from commit 622026e0c6)
2021-11-19 11:51:16 -08:00
Taylor McKinnon dc660b923f ft(UTAPI-43): Add allow/deny filter for events
(cherry picked from commit 6c53e19ce2)
2021-11-19 11:51:16 -08:00
Taylor McKinnon 61309e05ef impr(UTAPI-36): Bump vault cpu request and limit
(cherry picked from commit 5668d16c2e)
2021-11-19 11:51:16 -08:00
Taylor McKinnon cf28133d6c ft(UTAPI-33): Add ensureServiceUser script
(cherry picked from commit 2d876c17cf)
2021-11-19 11:51:16 -08:00
Taylor McKinnon 21da18bf47 impr(UTAPI-32): Change service user arnPrefix to full arn
(cherry picked from commit e8519ceebb)
2021-11-19 11:51:16 -08:00
Taylor McKinnon 1ac6dd9b7c bf(UTAPI-29): Fix bucketd tls config
(cherry picked from commit cf7b302414)
2021-11-19 11:51:16 -08:00
Taylor McKinnon a606edab21 ft(UTAPI-26): Add authorization for service user
(cherry picked from commit 5bef96367c)
2021-11-19 11:51:16 -08:00
Taylor McKinnon 10004b27cf ft(UTAPI-24): Limit user credentials
(cherry picked from commit 774aaef0dd)
2021-11-19 11:51:16 -08:00
Taylor McKinnon 236e2e4781 ft(UTAPI-23): Limit account level credentials
(cherry picked from commit 0e947f255b)
2021-11-19 11:51:14 -08:00
32 changed files with 2038 additions and 511 deletions

276
bin/ensureServiceUser Executable file
View File

@ -0,0 +1,276 @@
#! /usr/bin/env node
// TODO
// - deduplicate with Vault's seed script at https://github.com/scality/Vault/pull/1627
// - add permission boundaries to user when https://scality.atlassian.net/browse/VAULT-4 is implemented
const { errors } = require('arsenal');
const program = require('commander');
const werelogs = require('werelogs');
const async = require('async');
const { IAM } = require('aws-sdk');
const { version } = require('../package.json');
const systemPrefix = '/scality-internal/';
function generateUserPolicyDocument() {
return {
Version: '2012-10-17',
Statement: {
Effect: 'Allow',
Action: 'utapi:ListMetrics',
Resource: 'arn:scality:utapi:::*/*',
},
};
}
function createIAMClient(opts) {
return new IAM({
endpoint: opts.iamEndpoint,
});
}
function needsCreation(v) {
if (Array.isArray(v)) {
return !v.length;
}
return !v;
}
class BaseHandler {
constructor(serviceName, iamClient, log) {
this.serviceName = serviceName;
this.iamClient = iamClient;
this.log = log;
}
applyWaterfall(values, done) {
this.log.debug('applyWaterfall', { values, type: this.resourceType });
const v = values[this.resourceType];
if (needsCreation(v)) {
this.log.debug('creating', { v, type: this.resourceType });
return this.create(values)
.then(res =>
done(null, Object.assign(values, {
[this.resourceType]: res,
})))
.catch(done);
}
this.log.debug('conflicts check', { v, type: this.resourceType });
if (this.conflicts(v)) {
return done(errors.EntityAlreadyExists.customizeDescription(
`${this.resourceType} ${this.serviceName} already exists and conflicts with the expected value.`));
}
this.log.debug('nothing to do', { v, type: this.resourceType });
return done(null, values);
}
}
class UserHandler extends BaseHandler {
get resourceType() {
return 'user';
}
collect() {
return this.iamClient.getUser({
UserName: this.serviceName,
})
.promise()
.then(res => res.User);
}
create(allResources) {
return this.iamClient.createUser({
UserName: this.serviceName,
Path: systemPrefix,
})
.promise()
.then(res => res.User);
}
conflicts(u) {
return u.Path !== systemPrefix;
}
}
class PolicyHandler extends BaseHandler {
get resourceType() {
return 'policy';
}
collect() {
return this.iamClient.listPolicies({
MaxItems: 100,
OnlyAttached: false,
Scope: 'All',
})
.promise()
.then(res => res.Policies.find(p => p.PolicyName === this.serviceName));
}
create(allResources) {
const doc = generateUserPolicyDocument();
return this.iamClient.createPolicy({
PolicyName: this.serviceName,
PolicyDocument: JSON.stringify(doc),
Path: systemPrefix,
})
.promise()
.then(res => res.Policy);
}
conflicts(p) {
return p.Path !== systemPrefix;
}
}
class PolicyAttachmentHandler extends BaseHandler {
get resourceType() {
return 'policyAttachment';
}
collect() {
return this.iamClient.listAttachedUserPolicies({
UserName: this.serviceName,
MaxItems: 100,
})
.promise()
.then(res => res.AttachedPolicies)
}
create(allResources) {
return this.iamClient.attachUserPolicy({
PolicyArn: allResources.policy.Arn,
UserName: this.serviceName,
})
.promise();
}
conflicts(p) {
return false;
}
}
class AccessKeyHandler extends BaseHandler {
get resourceType() {
return 'accessKey';
}
collect() {
return this.iamClient.listAccessKeys({
UserName: this.serviceName,
MaxItems: 100,
})
.promise()
.then(res => res.AccessKeyMetadata)
}
create(allResources) {
return this.iamClient.createAccessKey({
UserName: this.serviceName,
})
.promise()
.then(res => res.AccessKey);
}
conflicts(a) {
return false;
}
}
function collectResource(v, done) {
v.collect()
.then(res => done(null, res))
.catch(err => {
if (err.code === 'NoSuchEntity') {
return done(null, null);
}
done(err);
});
}
function collectResourcesFromHandlers(handlers, cb) {
const tasks = handlers.reduce((acc, v) => ({
[v.resourceType]: done => collectResource(v, done),
...acc,
}), {});
async.parallel(tasks, cb);
}
function buildServiceUserHandlers(serviceName, client, log) {
return [
UserHandler,
PolicyHandler,
PolicyAttachmentHandler,
AccessKeyHandler,
].map(h => new h(serviceName, client, log));
}
function apply(client, serviceName, log, cb) {
const handlers = buildServiceUserHandlers(serviceName, client, log);
async.waterfall([
done => collectResourcesFromHandlers(handlers, done),
...handlers.map(h => h.applyWaterfall.bind(h)),
(values, done) => done(null, values.accessKey),
], cb);
}
function wrapAction(actionFunc, serviceName, options) {
werelogs.configure({
level: options.logLevel,
dump: options.logDumpLevel,
});
const log = new werelogs.Logger(process.argv[1]).newRequestLogger();
const client = createIAMClient(options);
actionFunc(client, serviceName, log, (err, data) => {
if (err) {
log.error('failed', {
data,
error: err,
});
if (err.EntityAlreadyExists) {
log.error(`run "${process.argv[1]} purge ${serviceName}" to fix.`);
}
process.exit(1);
}
log.info('success', { data });
process.exit();
});
}
program.version(version);
[
{
name: 'apply <service-name>',
actionFunc: apply,
},
].forEach(cmd => {
program
.command(cmd.name)
.option('--iam-endpoint <url>', 'IAM endpoint', 'http://localhost:8600')
.option('--log-level <level>', 'log level', 'info')
.option('--log-dump-level <level>', 'log level that triggers a dump of the debug buffer', 'error')
.action(wrapAction.bind(null, cmd.actionFunc));
});
const validCommands = program.commands.map(n => n._name);
// Exit with help output if the given command is invalid or too few arguments were passed
if (!validCommands.includes(process.argv[2])) {
program.outputHelp();
process.stdout.write('\n');
process.exit(1);
} else {
program.parse(process.argv);
}
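
For reference, a minimal sketch of checking the script's result with the same aws-sdk IAM client; the endpoint matches the script's default and the user name matches the one used in the functional tests, both stated here as assumptions for illustration:

const { IAM } = require('aws-sdk');

// Assumed values: the script's default IAM endpoint and the service user name from the tests.
const iam = new IAM({ endpoint: 'http://localhost:8600' });

iam.getUser({ UserName: 'service-utapi-user' }).promise()
    .then(res => console.log('user path:', res.User.Path)) // expected: /scality-internal/
    .then(() => iam.listAttachedUserPolicies({ UserName: 'service-utapi-user', MaxItems: 100 }).promise())
    .then(res => console.log('attached policies:', res.AttachedPolicies.map(p => p.PolicyName)))
    .catch(console.error);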

View File

@ -25,7 +25,6 @@ models:
redis:
context: '.'
dockerfile: 'images/redis/Dockerfile'
vault: eve/workers/mocks/vault
- Upload: &upload_artifacts
source: /artifacts
urls:
@ -36,6 +35,28 @@ models:
command: |
git lfs pull
haltOnFailure: True
- ShellCommand: &wait_for_vault
name: Wait for Vault
command: |
bash -c "
set -ex
bash tests/utils/wait_for_local_port.bash 8500 60"
haltOnFailure: true
logfiles:
vault:
filename: "/artifacts/vault.log"
follow: true
- ShellCommand: &wait_for_warp10
name: Wait for Warp 10
command: |
bash -c "
set -ex
bash tests/utils/wait_for_local_port.bash 4802 60"
haltOnFailure: true
logfiles:
warp10:
filename: "/artifacts/warp10.log"
follow: true
stages:
pre-merge:
@ -108,6 +129,8 @@ stages:
- ShellCommand:
name: run cron tests
command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash false ft_test:cron
env:
REINDEX_PYTHON_INTERPRETER: python3
logfiles:
utapi:
filename: "/artifacts/setup_ft_test:cron.log"
@ -132,21 +155,14 @@ stages:
steps:
- Git: *clone
- ShellCommand: *fetch_lfs
- ShellCommand:
name: Wait for Warp 10
command: |
bash -c "
set -ex
bash tests/utils/wait_for_local_port.bash 4802 60"
logfiles:
warp10:
filename: "/artifacts/warp10.log"
follow: true
- ShellCommand: *wait_for_vault
- ShellCommand: *wait_for_warp10
- ShellCommand:
name: run v2 functional tests
command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash true ft_test:v2
env:
UTAPI_CACHE_BACKEND: redis
UTAPI_SERVICE_USER_ENABLED: 'true'
UTAPI_LOG_LEVEL: trace
SETUP_CMD: "run start_v2:server"
logfiles:
@ -156,6 +172,9 @@ stages:
utapi:
filename: "/artifacts/setup_ft_test:v2.log"
follow: true
vault:
filename: "/artifacts/vault.log"
follow: true
run-v2-limit-tests:
worker:
<< : *workspace
@ -164,16 +183,8 @@ stages:
steps:
- Git: *clone
- ShellCommand: *fetch_lfs
- ShellCommand:
name: Wait for Warp 10
command: |
bash -c "
set -ex
bash tests/utils/wait_for_local_port.bash 4802 60"
logfiles:
warp10:
filename: "/artifacts/warp10.log"
follow: true
- ShellCommand: *wait_for_vault
- ShellCommand: *wait_for_warp10
- ShellCommand:
name: run v2 soft limit tests
command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash true ft_test:softLimit
@ -187,6 +198,10 @@ stages:
follow: true
utapi:
filename: "/artifacts/setup_ft_test:softLimit.log"
follow: true
vault:
filename: "/artifacts/vault.log"
follow: true
- ShellCommand:
name: run v2 hard limit tests
command: bash ./eve/workers/unit_and_feature_tests/run_ft_tests.bash true ft_test:hardLimit
@ -201,4 +216,7 @@ stages:
utapi:
filename: "/artifacts/setup_ft_test:hardLimit.log"
follow: true
vault:
filename: "/artifacts/vault.log"
follow: true
- Upload: *upload_artifacts

View File

@ -1,7 +0,0 @@
FROM node:alpine
ADD ./vault.js /usr/share/src/
WORKDIR /usr/share/src/
CMD node vault.js

View File

@ -1,66 +0,0 @@
const http = require('http');
const url = require('url');
const port = process.env.VAULT_PORT || 8500;
const unauthResp = {
ErrorResponse: {
$: {
xmlns: 'https://iam.amazonaws.com/doc/2010-05-08/',
},
Error: {
Code: 'InvalidAccessKeyId',
Message: 'The AWS access key Id you provided does not exist in our records.',
},
RequestId: '97f22e2dba45bca2a5cd:fb375c22ed4ea7500691',
},
};
class Vault {
constructor() {
this._server = null;
}
static _onRequest(req, res) {
const { query } = url.parse(req.url, true);
if (query.accessKey === 'invalidKey') {
res.writeHead(403);
res.write(JSON.stringify(unauthResp));
} else if (query.Action === 'AccountsCanonicalIds') {
res.writeHead(200);
let body;
if (Array.isArray(query.accountIds)) {
body = query.accountIds.map(id => ({
accountId: id,
canonicalId: id.split(':')[1],
}));
} else {
body = [{
accountId: query.accountIds,
canonicalId: query.accountIds.split(':')[1],
}];
}
res.write(JSON.stringify(body));
}
return res.end();
}
start() {
this._server = http.createServer(Vault._onRequest).listen(port);
}
end() {
this._server.close();
}
}
const vault = new Vault();
['SIGINT', 'SIGQUIT', 'SIGTERM'].forEach(eventName => {
process.on(eventName, () => process.exit(0));
});
// eslint-disable-next-line no-console
console.log('Starting Vault Mock...');
vault.start();

View File

@ -18,8 +18,6 @@ spec:
cpu: 1500m
memory: 3Gi
volumeMounts:
- mountPath: /var/run/docker.sock
name: docker-socket
- name: artifacts
readOnly: false
mountPath: /artifacts
@ -55,30 +53,82 @@ spec:
- /init | tee -a /artifacts/redis.log
resources:
requests:
cpu: 200m
memory: 1Gi
cpu: 150m
memory: 500Mi
limits:
cpu: 200m
memory: 500Mi
volumeMounts:
- name: artifacts
readOnly: false
mountPath: /artifacts
- name: redis-replica
image: "{{ images.redis }}"
command:
- sh
- -ce
- redis-server --port 6380 --slaveof localhost 6379 --slave-announce-ip localhost | tee -a /artifacts/redis.log
resources:
requests:
cpu: 150m
memory: 500Mi
limits:
cpu: 150m
memory: 500Mi
volumeMounts:
- name: artifacts
readOnly: false
mountPath: /artifacts
- name: redis-sentinel
image: "{{ images.redis }}"
command:
- sh
- -c
- |-
cat > /tmp/sentinel.conf <<EOF
port 16379
logfile ""
dir /tmp
sentinel announce-ip localhost
sentinel announce-port 16379
sentinel monitor scality-s3 localhost 6379 1
EOF
redis-sentinel /tmp/sentinel.conf
resources:
requests:
cpu: 150m
memory: 500Mi
limits:
cpu: 150m
memory: 500Mi
volumeMounts:
- name: artifacts
readOnly: false
mountPath: /artifacts
ports:
- containerPort: 16379
{% if vars.vault is defined and vars.vault == 'enabled' %}
- name: vault
image: registry.scality.com/vault-dev/vault:c2607856
command:
- bash
- -c
- 'chmod 400 tests/utils/keyfile && yarn start | tee -a /artifacts/vault.log'
env:
- name: VAULT_DB_BACKEND
value: LEVELDB
resources:
requests:
cpu: 500m
memory: 1Gi
limits:
cpu: 500m
memory: 1Gi
volumeMounts:
- name: artifacts
readOnly: false
mountPath: /artifacts
{% if vars.vault is defined and vars.vault == 'enabled' %}
- name: vault
image: "{{ images.vault }}"
resources:
requests:
cpu: 10m
memory: 64Mi
limits:
cpu: 50m
memory: 128Mi
{% endif %}
volumes:
- name: docker-socket
hostPath:
path: /var/run/docker.sock
type: Socket
- name: artifacts
emptyDir: {}

View File

@ -1,4 +1,4 @@
FROM buildpack-deps:jessie-curl
FROM buildpack-deps:bullseye-curl
#
# Install apt packages needed by utapi and buildbot_worker
@ -13,6 +13,10 @@ COPY eve/workers/unit_and_feature_tests/utapi_packages.list eve/workers/unit_and
WORKDIR /utapi
#
# Install Nodejs, yarn, git-lfs and buildbot deps
#
RUN wget https://nodejs.org/dist/v${NODE_VERSION}/node-v${NODE_VERSION}-linux-x64.tar.gz \
&& tar -xf node-v${NODE_VERSION}-linux-x64.tar.gz --directory /usr/local --strip-components 1 \
&& apt-get update -qq \
@ -20,10 +24,11 @@ RUN wget https://nodejs.org/dist/v${NODE_VERSION}/node-v${NODE_VERSION}-linux-x6
&& curl -sS http://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
&& echo "deb http://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list \
&& curl -sSL https://packagecloud.io/github/git-lfs/gpgkey | apt-key add - \
&& echo "deb https://packagecloud.io/github/git-lfs/debian/ jessie main" | tee /etc/apt/sources.list.d/lfs.list \
&& echo "deb https://packagecloud.io/github/git-lfs/debian/ bullseye main" | tee /etc/apt/sources.list.d/lfs.list \
&& curl -sSL https://download.docker.com/linux/debian/gpg | apt-key add - \
&& echo "deb [arch=amd64] https://download.docker.com/linux/debian bullseye stable" > /etc/apt/sources.list.d/docker.list \
&& apt-get update -qq \
&& cat /tmp/*packages.list | xargs apt-get install -y \
&& pip install pip==9.0.1 \
&& cat /tmp/*packages.list | xargs apt-get install -y --no-install-recommends\
&& rm -rf /var/lib/apt/lists/* \
&& rm -f /tmp/*packages.list \
&& rm -f /etc/supervisor/conf.d/*.conf \
@ -38,15 +43,18 @@ COPY package.json yarn.lock /utapi/
RUN yarn cache clean \
&& yarn install --frozen-lockfile \
&& yarn cache clean
#
# Run buildbot-worker on startup through supervisor
#
ARG BUILDBOT_VERSION
RUN pip install buildbot-worker==$BUILDBOT_VERSION
RUN pip3 install requests
RUN pip3 install redis==3.5.3
#
# Install buildbot
#
ARG BUILDBOT_VERSION=2.7.0
RUN pip3 install buildbot-worker==$BUILDBOT_VERSION requests redis
ADD eve/workers/unit_and_feature_tests/supervisor/buildbot_worker.conf /etc/supervisor/conf.d/
ADD eve/workers/unit_and_feature_tests/redis/sentinel.conf /etc/sentinel.conf
#
# Run buildbot-worker on startup through supervisor
#
CMD ["supervisord", "-n"]

View File

@ -3,9 +3,6 @@ git
git-lfs
libffi-dev
libssl-dev
python2.7
python2.7-dev
python-pip
sudo
supervisor
lsof

View File

@ -3,3 +3,6 @@ redis-server
python3
python3-pip
yarn
docker-ce-cli
jq
moreutils

View File

@ -12,6 +12,9 @@ const RedisClient = require('../libV2/redis');
const REINDEX_SCHEDULE = '0 0 * * Sun';
const REINDEX_LOCK_KEY = 's3:utapireindex:lock';
const REINDEX_LOCK_TTL = (60 * 60) * 24;
const REINDEX_PYTHON_INTERPRETER = process.env.REINDEX_PYTHON_INTERPRETER !== undefined
? process.env.REINDEX_PYTHON_INTERPRETER
: 'python3.4';
class UtapiReindex {
constructor(config) {
@ -109,7 +112,7 @@ class UtapiReindex {
const flags = this._buildFlags();
this._requestLogger.debug(`launching subprocess ${path} `
+ `with flags: ${flags}`);
const process = childProcess.spawn('python3.4', [
const process = childProcess.spawn(REINDEX_PYTHON_INTERPRETER, [
path,
...flags,
]);

View File

@ -11,7 +11,12 @@ const encode = require('encoding-down');
/* eslint-enable import/no-extraneous-dependencies */
const { UtapiMetric } = require('../models');
const { LoggerContext, asyncOrCallback } = require('../utils');
const {
LoggerContext,
logEventFilter,
asyncOrCallback,
buildFilterChain,
} = require('../utils');
const moduleLogger = new LoggerContext({
module: 'client',
@ -84,6 +89,11 @@ class UtapiClient {
this._drainCanSchedule = true;
this._drainDelay = (config && config.drainDelay) || 30000;
this._suppressedEventFields = (config && config.suppressedEventFields) || null;
const eventFilters = (config && config.filter) || {};
this._shouldPushMetric = buildFilterChain(eventFilters);
if (Object.keys(eventFilters).length !== 0) {
logEventFilter((...args) => moduleLogger.info(...args), 'utapi event filter enabled', eventFilters);
}
}
async join() {
@ -240,6 +250,11 @@ class UtapiClient {
? data
: new UtapiMetric(data);
// If this event has been filtered then exit early
if (!this._shouldPushMetric(metric)) {
return;
}
// Assign a uuid if one isn't passed
if (!metric.uuid) {
metric.uuid = uuid.v4();
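
As a usage sketch of the filter option wired in above (the require path, export shape, and bucket name are assumptions for illustration), a client configured with a deny filter silently drops matching events instead of pushing or caching them:

// Minimal sketch; require path and export shape are assumptions.
const UtapiClient = require('./libV2/client');

const client = new UtapiClient({
    drainDelay: 5000,
    filter: {
        bucket: { deny: new Set(['example-bucket']) }, // drop all events for this bucket
    },
});

// This event matches the deny filter, so pushMetric returns early without pushing it.
// pushMetric returns a promise when no callback is given.
client.pushMetric({ operationId: 'putObject', bucket: 'example-bucket', account: 'example-account' });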

View File

@ -44,5 +44,13 @@
"diskUsage": {
"retentionDays": 45,
"expirationEnabled": false
},
"serviceUser": {
"arn": "arn:aws:iam::000000000000:user/service-utapi-user",
"enabled": false
},
"filter": {
"allow": {},
"deny": {}
}
}
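
For illustration, a populated filter section follows the schema added further below: each allowed field (operationId, location, account, user, bucket) maps to a list of values under either allow or deny, but not both for the same field. The bucket names here are placeholders:

"filter": {
    "allow": {},
    "deny": {
        "bucket": ["example-bucket-1", "example-bucket-2"]
    }
}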

View File

@ -3,7 +3,9 @@ const path = require('path');
const Joi = require('@hapi/joi');
const assert = require('assert');
const { truthy, envNamespace } = require('../constants');
const {
truthy, envNamespace, allowedFilterFields, allowedFilterStates,
} = require('../constants');
const configSchema = require('./schema');
// We need to require the specific file rather than the parent module to avoid a circular require
const { parseDiskSizeSpec } = require('../utils/disk');
@ -227,6 +229,28 @@ class Config {
return certs;
}
static _parseResourceFilters(config) {
const resourceFilters = {};
allowedFilterFields.forEach(
field => allowedFilterStates.forEach(
state => {
const configResources = (config[state] && config[state][field]) || null;
const envVar = `FILTER_${field.toUpperCase()}_${state.toUpperCase()}`;
const resources = _loadFromEnv(envVar, configResources, _typeCasts.list);
if (resources) {
if (resourceFilters[field]) {
throw new Error('You can not define both an allow and a deny list for an event field.');
}
resourceFilters[field] = { [state]: new Set(resources) };
}
},
),
);
return resourceFilters;
}
_parseConfig(config) {
const parsedConfig = {};
@ -342,6 +366,13 @@ class Config {
parsedConfig.bucketd = _loadFromEnv('BUCKETD_BOOTSTRAP', config.bucketd, _typeCasts.serverList);
parsedConfig.serviceUser = {
arn: _loadFromEnv('SERVICE_USER_ARN', config.serviceUser.arn),
enabled: _loadFromEnv('SERVICE_USER_ENABLED', config.serviceUser.enabled, _typeCasts.bool),
};
parsedConfig.filter = Config._parseResourceFilters(config.filter);
return parsedConfig;
}

View File

@ -1,4 +1,5 @@
const Joi = require('@hapi/joi');
const { allowedFilterFields, allowedFilterStates } = require('../constants');
const backoffSchema = Joi.object({
min: Joi.number(),
@ -99,6 +100,21 @@ const schema = Joi.object({
expirationEnabled: Joi.boolean(),
hardLimit: Joi.string(),
}),
serviceUser: Joi.object({
arn: Joi.string(),
enabled: Joi.boolean(),
}),
filter: Joi.object(allowedFilterStates.reduce(
(filterObj, state) => {
filterObj[state] = allowedFilterFields.reduce(
(stateObj, field) => {
stateObj[field] = Joi.array().items(Joi.string());
return stateObj;
}, {},
);
return filterObj;
}, {},
)),
});
module.exports = schema;

View File

@ -103,6 +103,14 @@ const constants = {
putDeleteMarkerObject: 'deleteObject',
},
expirationChunkDuration: 900000000, // 15 minutes in microseconds
allowedFilterFields: [
'operationId',
'location',
'account',
'user',
'bucket',
],
allowedFilterStates: ['allow', 'deny'],
};
constants.operationToResponse = constants.operations

View File

@ -10,7 +10,7 @@ const moduleLogger = new LoggerContext({
const params = {
bucketdBootstrap: config.bucketd,
https: config.https,
https: config.tls,
};
module.exports = new BucketClientInterface(params, bucketclient, moduleLogger);

View File

@ -1,9 +1,13 @@
/* eslint-disable no-restricted-syntax */
const { usersBucket, splitter: mdKeySplitter, mpuBucketPrefix } = require('arsenal').constants;
const arsenal = require('arsenal');
const metadata = require('./client');
const { LoggerContext, logger } = require('../utils');
const { keyVersionSplitter } = require('../constants');
const { usersBucket, splitter: mdKeySplitter, mpuBucketPrefix } = arsenal.constants;
const { BucketInfo } = arsenal.models;
const moduleLogger = new LoggerContext({
module: 'metadata.client',
});
@ -108,9 +112,25 @@ function bucketExists(bucket) {
));
}
function getBucket(bucket) {
return new Promise((resolve, reject) => {
metadata.getBucketAttributes(
bucket,
logger.newRequestLogger(), (err, data) => {
if (err) {
reject(err);
return;
}
resolve(BucketInfo.fromObj(data));
},
);
});
}
module.exports = {
listBuckets,
listObjects,
listMPUs,
bucketExists,
getBucket,
};
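
A brief usage sketch of the new getBucket helper (the require path and bucket name are assumptions); it resolves to an arsenal BucketInfo instance, which is how the authorization code further below checks bucket ownership:

const { getBucket } = require('./libV2/metadata'); // assumed export path

getBucket('example-bucket')
    .then(bucketMD => {
        // bucketMD is an arsenal BucketInfo instance
        console.log(bucketMD.getName(), bucketMD.getOwner());
    })
    .catch(err => console.error('bucket lookup failed', err));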

View File

@ -118,6 +118,10 @@ async function authV4Middleware(request, response, params) {
[passed, authorizedResources] = await translateAndAuthorize(request, action, params.level, requestedResources);
} catch (error) {
request.logger.error('error during authentication', { error });
// rethrow any access denied errors
if (error.AccessDenied) {
throw error;
}
throw errors.InternalError;
}

View File

@ -7,7 +7,12 @@ const config = require('../config');
const metadata = require('../metadata');
const { serviceToWarp10Label, warp10RecordType } = require('../constants');
const { LoggerContext, convertTimestamp } = require('../utils');
const {
LoggerContext,
logEventFilter,
convertTimestamp,
buildFilterChain,
} = require('../utils');
const logger = new LoggerContext({
module: 'ReindexTask',
@ -18,6 +23,11 @@ class ReindexTask extends BaseTask {
super(options);
this._defaultSchedule = config.reindexSchedule;
this._defaultLag = 0;
const eventFilters = (config && config.filter) || {};
this._shouldReindex = buildFilterChain((config && config.filter) || {});
if (Object.keys(eventFilters).length !== 0) {
logEventFilter((...args) => logger.info(...args), 'reindex resource filtering enabled', eventFilters);
}
}
async _setup(includeDefaultOpts = true) {
@ -145,6 +155,11 @@ class ReindexTask extends BaseTask {
const accountTotals = {};
const ignoredAccounts = new Set();
await async.eachLimit(this.targetBuckets, 5, async bucket => {
if (!this._shouldReindex({ bucket: bucket.name, account: bucket.account })) {
logger.debug('skipping excluded bucket', { bucket: bucket.name, account: bucket.account });
return;
}
logger.info('started bucket reindex', { bucket: bucket.name });
const mpuBucket = `${mpuBucketPrefix}${bucket.name}`;

47
libV2/utils/filter.js Normal file
View File

@ -0,0 +1,47 @@
const assert = require('assert');
/**
 * filterObject
 *
 * Filters an object by the value of one of its keys against an allow or deny list.
 * Returns a boolean, with false meaning the value was rejected by the filter, so the
 * function can be passed directly to Array.filter etc.
 *
 * @param {Object} obj - Object to inspect
 * @param {string} key - Object key to inspect
 * @param {Object} filter - Filter definition
 * @param {Set} [filter.allow] - Set containing values to include
 * @param {Set} [filter.deny] - Set containing values to exclude
 * @returns {bool} - false if the object should be filtered out
 */
function filterObject(obj, key, { allow, deny }) {
if (allow && deny) {
throw new Error('You can not define both an allow and a deny list.');
}
if (!allow && !deny) {
throw new Error('You must define either an allow or a deny list.');
}
if (allow) {
assert(allow instanceof Set);
return obj[key] === undefined || allow.has(obj[key]);
}
assert(deny instanceof Set);
return obj[key] === undefined || !deny.has(obj[key]);
}
/**
 * buildFilterChain
 *
 * Constructs a predicate function from a map of key names to allow/deny filters.
 * The returned function returns a boolean, with false meaning the object was rejected
 * by one of the filters, so it can be passed directly to Array.filter etc.
 *
 * @param {Object<string, Object<string, Set>>} filters - map of object keys to allow/deny filters
 * @returns {function(Object): bool}
 */
function buildFilterChain(filters) {
return obj => Object.entries(filters).every(([key, filter]) => filterObject(obj, key, filter));
}
module.exports = { filterObject, buildFilterChain };
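
A short usage sketch of the helpers above (the require path, field names, and values are illustrative only):

const { buildFilterChain } = require('./libV2/utils');

const shouldPush = buildFilterChain({
    bucket: { deny: new Set(['internal-logs']) },
    account: { allow: new Set(['acct-1']) },
});

shouldPush({ bucket: 'internal-logs', account: 'acct-1' }); // false: bucket is denied
shouldPush({ bucket: 'public-data', account: 'acct-1' });   // true: passes both filters
shouldPush({ account: 'acct-1' });                          // true: missing fields are never filtered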

View File

@ -3,6 +3,7 @@ const shard = require('./shard');
const timestamp = require('./timestamp');
const func = require('./func');
const disk = require('./disk');
const filter = require('./filter');
module.exports = {
...log,
@ -10,4 +11,5 @@ module.exports = {
...timestamp,
...func,
...disk,
...filter,
};

View File

@ -1,5 +1,6 @@
const werelogs = require('werelogs');
const config = require('../config');
const { comprehend } = require('./func');
const loggerConfig = {
level: config.logging.level,
@ -102,8 +103,26 @@ function buildRequestLogger(req) {
return new LoggerContext({}, reqLogger);
}
function logEventFilter(logger, msg, eventFilters) {
const filterLog = comprehend(
eventFilters,
(level, rules) => ({
key: level,
value: comprehend(
rules,
(rule, values) => ({
key: rule,
value: Array.from(values),
}),
),
}),
);
logger(msg, { filters: filterLog });
}
module.exports = {
logger: rootLogger,
buildRequestLogger,
LoggerContext,
logEventFilter,
};

View File

@ -1,179 +0,0 @@
const assert = require('assert');
const { auth, policies } = require('arsenal');
const vaultclient = require('vaultclient');
const config = require('./config');
const errors = require('./errors');
/**
@class Vault
* Creates a vault instance for authentication and authorization
*/
class Vault {
constructor(options) {
const { host, port } = options.vaultd;
if (options.tls) {
const { key, cert, ca } = options.tls;
this._client = new vaultclient.Client(host, port, true, key, cert,
ca);
} else {
this._client = new vaultclient.Client(host, port);
}
}
/** authenticateV4Request
*
* @param {object} params - the authentication parameters as returned by
* auth.extractParams
* @param {number} params.version - shall equal 4
* @param {string} params.data.accessKey - the user's accessKey
* @param {string} params.data.signatureFromRequest - the signature read from
* the request
* @param {string} params.data.region - the AWS region
* @param {string} params.data.stringToSign - the stringToSign
* @param {string} params.data.scopeDate - the timespan to allow the request
* @param {string} params.data.authType - the type of authentication
* (query or header)
* @param {string} params.data.signatureVersion - the version of the
* signature (AWS or AWS4)
* @param {number} params.data.signatureAge - the age of the signature in ms
* @param {string} params.data.log - the logger object
* @param {RequestContext []} requestContexts - an array of
* RequestContext instances which contain information
* for policy authorization check
* @param {function} callback - cb(err)
* @return {undefined}
*/
authenticateV4Request(params, requestContexts, callback) {
const {
accessKey, signatureFromRequest, region, scopeDate,
stringToSign,
} = params.data;
const { log } = params;
log.debug('authenticating V4 request');
const serializedRCs = requestContexts.map(rc => rc.serialize());
this._client.verifySignatureV4(
stringToSign, signatureFromRequest,
accessKey, region, scopeDate,
{ reqUid: log.getSerializedUids(), requestContext: serializedRCs },
(err, authInfo) => {
if (err) {
log.trace('error from vault', { error: err });
return callback(err);
}
return callback(null,
authInfo.message.body.authorizationResults);
},
);
}
/**
* Returns canonical Ids for a given list of account Ids
* @param {string[]} accountIds - list of account ids
* @param {object} log - Werelogs request logger
* @return {Promise} -
*/
getCanonicalIds(accountIds, log) {
log.debug('retrieving canonical ids for account ids', {
method: 'Vault.getCanonicalIds',
});
return new Promise((resolve, reject) =>
this._client.getCanonicalIdsByAccountIds(accountIds,
{ reqUid: log.getSerializedUids(), logger: log }, (err, res) => {
if (err) {
reject(err);
return;
}
if (!res.message || !res.message.body) {
reject(errors.InternalError);
return;
}
resolve(res.message.body.map(acc => ({
resource: acc.accountId,
id: acc.canonicalId,
})));
}));
}
}
const vault = new Vault(config);
auth.setHandler(vault);
async function translateResourceIds(level, resources, log) {
if (level === 'accounts') {
return vault.getCanonicalIds(resources, log);
}
return resources.map(resource => ({ resource, id: resource }));
}
async function authenticateRequest(request, action, level, resources) {
const policyContext = new policies.RequestContext(
request.headers,
request.query,
level,
resources,
request.ip,
request.ctx.encrypted,
action,
'utapi',
);
return new Promise((resolve, reject) => {
auth.server.doAuth(request, request.logger.logger, (err, res) => {
if (err && (err.InvalidAccessKeyId || err.AccessDenied)) {
resolve([false]);
return;
}
if (err) {
reject(err);
return;
}
// Will only have res if request is from a user rather than an account
let authorizedResources = resources;
if (res) {
try {
authorizedResources = res.reduce(
(authed, result) => {
if (result.isAllowed) {
// result.arn should be of format:
// arn:scality:utapi:::resourcetype/resource
assert(typeof result.arn === 'string');
assert(result.arn.indexOf('/') > -1);
const resource = result.arn.split('/')[1];
authed.push(resource);
request.logger.trace('access granted for resource', { resource });
}
return authed;
}, [],
);
} catch (err) {
reject(err);
}
} else {
request.logger.trace('granted access to all resources');
}
resolve([
authorizedResources.length !== 0,
authorizedResources,
]);
}, 's3', [policyContext]);
});
}
async function translateAndAuthorize(request, action, level, resources) {
const [authed, authorizedResources] = await authenticateRequest(request, action, level, resources);
if (!authed) {
return [authed];
}
const translated = await translateResourceIds(level, authorizedResources, request.logger.logger);
return [authed, translated];
}
module.exports = {
authenticateRequest,
translateAndAuthorize,
Vault,
vault,
};

128
libV2/vault/client.js Normal file
View File

@ -0,0 +1,128 @@
const assert = require('assert');
const { auth, policies } = require('arsenal');
const vaultclient = require('vaultclient');
const config = require('../config');
const errors = require('../errors');
/**
@class Vault
* Creates a vault instance for authentication and authorization
*/
class VaultWrapper extends auth.Vault {
constructor(options) {
let client;
const { host, port } = options.vaultd;
if (options.tls) {
const { key, cert, ca } = options.tls;
client = new vaultclient.Client(host, port, true, key, cert,
ca);
} else {
client = new vaultclient.Client(host, port);
}
super(client, 'vault');
}
/**
* Returns canonical Ids for a given list of account Ids
* @param {string[]} accountIds - list of account ids
* @param {object} log - Werelogs request logger
* @return {Promise} -
*/
getCanonicalIds(accountIds, log) {
log.debug('retrieving canonical ids for account ids', {
method: 'Vault.getCanonicalIds',
accountIds,
});
return new Promise((resolve, reject) =>
this.client.getCanonicalIdsByAccountIds(accountIds,
{ reqUid: log.getSerializedUids(), logger: log }, (err, res) => {
if (err) {
reject(err);
return;
}
if (!res.message || !res.message.body) {
reject(errors.InternalError);
return;
}
resolve(res.message.body.map(acc => ({
resource: acc.accountId,
id: acc.canonicalId,
})));
}));
}
// eslint-disable-next-line class-methods-use-this
authenticateRequest(request, action, level, resources) {
const policyContext = new policies.RequestContext(
request.headers,
request.query,
level,
resources,
request.ip,
request.ctx.encrypted,
action,
'utapi',
);
return new Promise((resolve, reject) => {
auth.server.doAuth(
request,
request.logger.logger,
(err, authInfo, authRes) => {
if (err && (err.InvalidAccessKeyId || err.AccessDenied)) {
resolve({ authed: false });
return;
}
if (err) {
reject(err);
return;
}
// Only IAM users will return authorizedResources
let authorizedResources = resources;
if (authRes) {
authorizedResources = authRes
.filter(resource => resource.isAllowed)
.map(resource => {
// resource.arn should be of format:
// arn:scality:utapi:::resourcetype/resource
assert(typeof resource.arn === 'string');
assert(resource.arn.indexOf('/') > -1);
return resource.arn.split('/')[1];
});
}
resolve({ authed: true, authInfo, authorizedResources });
}, 's3', [policyContext],
);
});
}
getUsersById(userIds, log) {
log.debug('retrieving user arns for user ids', {
method: 'Vault.getUsersById',
userIds,
});
return new Promise((resolve, reject) =>
this.client.getUsersById(userIds,
{ reqUid: log.getSerializedUids(), logger: log }, (err, res) => {
if (err) {
reject(err);
return;
}
if (!res.message || !res.message.body) {
reject(errors.InternalError);
return;
}
resolve(res.message.body);
}));
}
}
const vault = new VaultWrapper(config);
auth.setHandler(vault);
module.exports = {
VaultWrapper,
vault,
};

172
libV2/vault/index.js Normal file
View File

@ -0,0 +1,172 @@
const { vault } = require('./client');
const metadata = require('../metadata');
const errors = require('../errors');
const config = require('../config');
async function authorizeAccountAccessKey(authInfo, level, resources, log) {
let authed = false;
let authedRes = [];
log.trace('Authorizing account', { resources });
switch (level) {
// Account keys can only query their own account's metrics,
// so we can short-circuit the auth:
// did they request their own account? Then authorize ONLY that account
case 'accounts':
authed = resources.some(r => r === authInfo.getShortid());
authedRes = authed ? [{ resource: authInfo.getShortid(), id: authInfo.getCanonicalID() }] : [];
break;
// Account keys are allowed access to any of their child users' metrics
case 'users': {
let users;
try {
users = await vault.getUsersById(resources, log.logger);
} catch (error) {
log.error('failed to fetch user', { error });
throw errors.AccessDenied;
}
authedRes = users
.filter(user => user.parentId === authInfo.getShortid())
.map(user => ({ resource: user.id, id: user.id }));
authed = authedRes.length !== 0;
break;
}
// Accounts are only allowed access if they are the owner of the bucket
case 'buckets': {
const buckets = await Promise.all(
resources.map(async bucket => {
try {
const bucketMD = await metadata.getBucket(bucket);
return bucketMD;
} catch (error) {
log.error('failed to fetch metadata for bucket', { error, bucket });
throw errors.AccessDenied;
}
}),
);
authedRes = buckets
.filter(bucket => bucket.getOwner() === authInfo.getCanonicalID())
.map(bucket => ({ resource: bucket.getName(), id: bucket.getName() }));
authed = authedRes.length !== 0;
break;
}
// Accounts can not access service resources
case 'services':
break;
default:
log.error('Unknown metric level', { level });
throw new Error(`Unknown metric level ${level}`);
}
return [authed, authedRes];
}
async function authorizeUserAccessKey(authInfo, level, resources, log) {
let authed = false;
let authedRes = [];
log.trace('Authorizing IAM user', { resources });
// Get the parent account id from the user's arn
const parentAccountId = authInfo.getArn().split(':')[4];
// All users require an attached policy to query metrics
// Additional filtering is performed here to limit access to the user's account
switch (level) {
// User keys can only query their parent account's metrics,
// so we can short-circuit the auth:
// did they request the parent account? Then authorize ONLY that account
case 'accounts': {
authed = resources.some(r => r === parentAccountId);
authedRes = authed ? [{ resource: parentAccountId, id: authInfo.getCanonicalID() }] : [];
break;
}
// Users can query other users' metrics if they are under the same account
case 'users': {
let users;
try {
users = await vault.getUsersById(resources, log.logger);
} catch (error) {
log.error('failed to fetch user', { error });
throw errors.AccessDenied;
}
authedRes = users
.filter(user => user.parentId === parentAccountId)
.map(user => ({ resource: user.id, id: user.id }));
authed = authedRes.length !== 0;
break;
}
// Users can query bucket metrics if they are owned by the same account
case 'buckets': {
let buckets;
try {
buckets = await Promise.all(
resources.map(bucket => metadata.getBucket(bucket)),
);
} catch (error) {
log.error('failed to fetch metadata for bucket', { error });
throw error;
}
authedRes = buckets
.filter(bucket => bucket.getOwner() === authInfo.getCanonicalID())
.map(bucket => ({ resource: bucket.getName(), id: bucket.getName() }));
authed = authedRes.length !== 0;
break;
}
case 'services':
break;
default:
log.error('Unknown metric level', { level });
throw new Error(`Unknown metric level ${level}`);
}
return [authed, authedRes];
}
async function authorizeServiceUser(authInfo, level, resources, log) {
log.trace('Authorizing service user', { resources, arn: authInfo.getArn() });
// The service user is allowed access to any resource so no checking is done
if (level === 'accounts') {
const canonicalIds = await vault.getCanonicalIds(resources, log.logger);
return [canonicalIds.length !== 0, canonicalIds];
}
return [resources.length !== 0, resources.map(resource => ({ resource, id: resource }))];
}
async function translateAndAuthorize(request, action, level, resources) {
const {
authed,
authInfo,
authorizedResources,
} = await vault.authenticateRequest(request, action, level, resources);
if (!authed) {
return [false, []];
}
if (config.serviceUser.enabled && authInfo.getArn() === config.serviceUser.arn) {
return authorizeServiceUser(authInfo, level, authorizedResources, request.logger);
}
if (authInfo.isRequesterAnIAMUser()) {
return authorizeUserAccessKey(authInfo, level, authorizedResources, request.logger);
}
return authorizeAccountAccessKey(authInfo, level, authorizedResources, request.logger);
}
module.exports = {
translateAndAuthorize,
vault,
};
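
For context, a condensed sketch of how the auth middleware shown earlier consumes this module (the require path and the surrounding error handling are assumptions; the real middleware lives in the server code):

const { translateAndAuthorize } = require('./libV2/vault');

async function authorize(request, action, level, requestedResources) {
    const [passed, authorizedResources] = await translateAndAuthorize(
        request, action, level, requestedResources,
    );
    if (!passed) {
        return null; // caller responds with AccessDenied (403)
    }
    // authorizedResources is a list of { resource, id } pairs
    // (ids are canonical ids for the 'accounts' level)
    return authorizedResources;
}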

View File

@ -21,6 +21,7 @@
"@senx/warp10": "^1.0.14",
"arsenal": "scality/Arsenal#65966f5",
"async": "^3.2.0",
"aws-sdk": "^2.1005.0",
"aws4": "^1.8.0",
"backo": "^1.1.0",
"body-parser": "^1.19.0",
@ -39,8 +40,8 @@
"oas-tools": "^2.1.8",
"prom-client": "^13.1.0",
"uuid": "^3.3.2",
"vaultclient": "scality/vaultclient#ff9e92f",
"werelogs": "scality/werelogs#0a4c576"
"vaultclient": "scality/vaultclient#9013faab804a7ee61bd8bc4c71de89c3498bb6c0",
"werelogs": "scality/werelogs#8.1.0"
},
"devDependencies": {
"eslint": "^5.12.0",
@ -54,10 +55,10 @@
},
"resolutions": {
"**/@yarnpkg/fslib": "2.4.0",
"**/@yarnpkg/libzip" : "2.2.1",
"**/@yarnpkg/libzip": "2.2.1",
"**/@yarnpkg/json-proxy": "2.1.0",
"**/@yarnpkg/parsers": "2.3.0"
},
},
"scripts": {
"ft_test": "mocha --recursive tests/functional",
"ft_test:client": "mocha --recursive tests/functional/client",

View File

@ -9,7 +9,8 @@ const { convertTimestamp, now } = require('../../../../libV2/utils');
const { operationToResponse } = require('../../../../libV2/constants');
const { generateCustomEvents } = require('../../../utils/v2Data');
const { UtapiMetric } = require('../../../../libV2/models');
const { BucketD } = require('../../../utils/mock/');
const vaultclient = require('../../../utils/vaultclient');
const warp10 = warp10Clients[0];
const _now = Math.floor(new Date().getTime() / 1000);
@ -28,7 +29,7 @@ const emptyOperationsResponse = Object.values(operationToResponse)
return prev;
}, {});
async function listMetrics(level, resources, start, end, force403 = false) {
async function listMetrics(level, resources, start, end, credentials) {
const body = {
[level]: resources,
};
@ -50,12 +51,13 @@ async function listMetrics(level, resources, start, end, force403 = false) {
},
};
const credentials = {
accessKeyId: force403 ? 'invalidKey' : 'accessKey1',
secretAccessKey: 'verySecretKey1',
const { accessKey: accessKeyId, secretKey: secretAccessKey } = credentials;
const _credentials = {
accessKeyId,
secretAccessKey,
};
const sig = aws4.sign(headers, credentials);
const sig = aws4.sign(headers, _credentials);
return needle(
'post',
@ -81,140 +83,202 @@ function opsToResp(operations) {
}, { ...emptyOperationsResponse });
}
const testCases = [
{
desc: 'for a single resource',
args: { [uuid.v4()]: { [uuid.v4()]: [uuid.v4()] } },
},
{
desc: 'for multiple resources',
args: {
[uuid.v4()]: {
[uuid.v4()]: [uuid.v4(), uuid.v4(), uuid.v4()],
[uuid.v4()]: [uuid.v4(), uuid.v4(), uuid.v4()],
},
[uuid.v4()]: {
[uuid.v4()]: [uuid.v4(), uuid.v4(), uuid.v4()],
[uuid.v4()]: [uuid.v4(), uuid.v4(), uuid.v4()],
},
[uuid.v4()]: {
[uuid.v4()]: [uuid.v4(), uuid.v4(), uuid.v4()],
[uuid.v4()]: [uuid.v4(), uuid.v4(), uuid.v4()],
},
},
},
];
function assertMetricResponse(provided, expected) {
assert.deepStrictEqual(provided.operations, opsToResp(expected.ops));
assert.strictEqual(provided.incomingBytes, expected.in);
assert.strictEqual(provided.outgoingBytes, expected.out);
assert.deepStrictEqual(provided.storageUtilized, [0, expected.bytes]);
assert.deepStrictEqual(provided.numberOfObjects, [0, expected.count]);
}
describe('Test listMetric', function () {
this.timeout(10000);
testCases.forEach(testCase => {
describe(testCase.desc, () => {
const bucketd = new BucketD(true);
let account;
let user;
let otherAccount;
let otherUser;
let serviceAccount;
let serviceUser;
const bucket = uuid.v4();
const otherBucket = uuid.v4();
let totals;
before(async () => {
account = await vaultclient.createAccountAndKeys(uuid.v4());
user = await vaultclient.createUserAndKeys(account, uuid.v4());
otherAccount = await vaultclient.createAccountAndKeys(uuid.v4());
otherUser = await vaultclient.createUser(otherAccount, uuid.v4());
serviceAccount = await vaultclient.createInternalServiceAccountAndKeys();
serviceUser = await vaultclient.createUserAndKeys(serviceAccount, 'service-utapi-user');
await Promise.all([
vaultclient.createAndAttachUtapiPolicy(account, user, 'buckets', '*'),
vaultclient.createAndAttachUtapiPolicy(account, user, 'accounts', '*'),
vaultclient.createAndAttachUtapiPolicy(account, user, 'users', '*'),
vaultclient.createAndAttachUtapiPolicy(serviceAccount, serviceUser, 'buckets', '*'),
vaultclient.createAndAttachUtapiPolicy(serviceAccount, serviceUser, 'accounts', '*'),
vaultclient.createAndAttachUtapiPolicy(serviceAccount, serviceUser, 'users', '*'),
]);
bucketd.createBucketsWithOwner([
{ name: bucket, owner: account.canonicalId },
{ name: otherBucket, owner: otherAccount.canonicalId },
]);
bucketd.start();
const { events, totals: _totals } = generateCustomEvents(
getTs(-360),
getTs(-60),
1000,
testCase.args,
{ [account.canonicalId]: { [user.id]: [bucket] } },
);
totals = _totals;
assert(await ingestEvents(events));
});
after(async () => {
bucketd.end();
await warp10.delete({
className: '~.*',
start: 0,
end: now(),
});
await vaultclient.cleanupAccountAndUsers(account);
await vaultclient.cleanupAccountAndUsers(otherAccount);
await vaultclient.cleanupAccountAndUsers(serviceAccount);
});
const accounts = [];
const users = [];
const buckets = [];
Object.entries(testCase.args)
.forEach(([account, _users]) => {
accounts.push(`account:${account}`);
Object.entries(_users).forEach(([user, _buckets]) => {
users.push(user);
buckets.push(..._buckets);
});
});
const metricQueries = {
accounts,
users,
buckets,
};
Object.entries(metricQueries)
.forEach(query => {
const [level, resources] = query;
it(`should get metrics for ${level}`, async () => {
const resp = await listMetrics(...query, getTs(-500), getTs(0));
describe('test account credentials', () => {
it('should list metrics for the same account', async () => {
const resp = await listMetrics('accounts', [account.id], getTs(-500), getTs(0), account);
assert(Array.isArray(resp.body));
const { body } = resp;
assert.deepStrictEqual(body.map(r => r[metricResponseKeys[level]]), resources);
assert.deepStrictEqual(body.map(r => r[metricResponseKeys.accounts]), [account.id]);
body.forEach(metric => {
const key = metric[metricResponseKeys[level]];
const _key = level === 'accounts' ? key.split(':')[1] : key;
const expected = totals[level][_key];
assert.deepStrictEqual(metric.operations, opsToResp(expected.ops));
assert.strictEqual(metric.incomingBytes, expected.in);
assert.strictEqual(metric.outgoingBytes, expected.out);
assert.deepStrictEqual(metric.storageUtilized, [0, expected.bytes]);
assert.deepStrictEqual(metric.numberOfObjects, [0, expected.count]);
});
});
});
assertMetricResponse(metric, totals.accounts[account.canonicalId]);
});
});
it('should return 0 if metrics are negative', async () => {
const bucket = `imabucket-${uuid.v4()}`;
const account = `imaaccount-${uuid.v4()}`;
const event = new UtapiMetric({
timestamp: getTs(0),
bucket,
account,
objectDelta: -1,
sizeDelta: -1,
incomingBytes: -1,
outgoingBytes: -1,
operationId: 'putObject',
it("should list metrics for an account's user", async () => {
const resp = await listMetrics('users', [user.id], getTs(-500), getTs(0), account);
assert(Array.isArray(resp.body));
const { body } = resp;
assert.deepStrictEqual(body.map(r => r[metricResponseKeys.users]), [user.id]);
body.forEach(metric => {
assertMetricResponse(metric, totals.users[user.id]);
});
});
await ingestEvents([event]);
const bucketResp = await listMetrics('buckets', [bucket], getTs(-1), getTs(1));
assert(Array.isArray(bucketResp.body));
const [bucketMetric] = bucketResp.body;
assert.deepStrictEqual(bucketMetric.storageUtilized, [0, 0]);
assert.deepStrictEqual(bucketMetric.numberOfObjects, [0, 0]);
assert.deepStrictEqual(bucketMetric.incomingBytes, 0);
assert.deepStrictEqual(bucketMetric.outgoingBytes, 0);
const accountResp = await listMetrics('accounts', [account], getTs(-1), getTs(1));
assert(Array.isArray(accountResp.body));
const [accountMetric] = accountResp.body;
assert.deepStrictEqual(accountMetric.storageUtilized, [0, 0]);
assert.deepStrictEqual(accountMetric.numberOfObjects, [0, 0]);
assert.deepStrictEqual(accountMetric.incomingBytes, 0);
assert.deepStrictEqual(accountMetric.outgoingBytes, 0);
it("should list metrics for an account's bucket", async () => {
const resp = await listMetrics('buckets', [bucket], getTs(-500), getTs(0), account);
assert(Array.isArray(resp.body));
const { body } = resp;
assert.deepStrictEqual(body.map(r => r[metricResponseKeys.buckets]), [bucket]);
body.forEach(metric => {
assertMetricResponse(metric, totals.buckets[bucket]);
});
});
it('should return a 403 if unauthorized', async () => {
const resp = await listMetrics('buckets', ['test'], getTs(-1), getTs(1), true);
it('should return Access Denied for a different account', async () => {
const resp = await listMetrics('accounts', [otherAccount.id], getTs(-500), getTs(0), account);
assert.strictEqual(resp.statusCode, 403);
assert.deepStrictEqual(resp.body, { code: 'AccessDenied', message: 'Access Denied' });
});
it('should use the current timestamp for "end" if it is not provided', async () => {
const resp = await listMetrics('buckets', ['test'], getTs(-1));
assert.strictEqual(resp.body[0].timeRange.length, 2);
it("should return Access Denied for a different account's user", async () => {
const resp = await listMetrics('users', [otherUser.id], getTs(-500), getTs(0), account);
assert.strictEqual(resp.statusCode, 403);
assert.deepStrictEqual(resp.body, { code: 'AccessDenied', message: 'Access Denied' });
});
it("should return Access Denied for a different account's bucket", async () => {
const resp = await listMetrics('buckets', [otherBucket], getTs(-500), getTs(0), account);
assert.strictEqual(resp.statusCode, 403);
assert.deepStrictEqual(resp.body, { code: 'AccessDenied', message: 'Access Denied' });
});
});
describe('test user credentials', () => {
it('should list metrics for the same account', async () => {
const resp = await listMetrics('accounts', [account.id], getTs(-500), getTs(0), user);
assert(Array.isArray(resp.body));
const { body } = resp;
assert.deepStrictEqual(body.map(r => r[metricResponseKeys.accounts]), [account.id]);
body.forEach(metric => {
assertMetricResponse(metric, totals.accounts[account.canonicalId]);
});
});
it("should list metrics for a user's account", async () => {
const resp = await listMetrics('users', [user.id], getTs(-500), getTs(0), user);
assert(Array.isArray(resp.body));
const { body } = resp;
assert.deepStrictEqual(body.map(r => r[metricResponseKeys.users]), [user.id]);
body.forEach(metric => {
assertMetricResponse(metric, totals.users[user.id]);
});
});
it("should list metrics for an user's bucket", async () => {
const resp = await listMetrics('buckets', [bucket], getTs(-500), getTs(0), user);
assert(Array.isArray(resp.body));
const { body } = resp;
assert.deepStrictEqual(body.map(r => r[metricResponseKeys.buckets]), [bucket]);
body.forEach(metric => {
assertMetricResponse(metric, totals.buckets[bucket]);
});
});
it("should return Access Denied for a different user's account", async () => {
const resp = await listMetrics('accounts', [otherAccount.id], getTs(-500), getTs(0), user);
assert.strictEqual(resp.statusCode, 403);
assert.deepStrictEqual(resp.body, { code: 'AccessDenied', message: 'Access Denied' });
});
it("should return Access Denied for a different account's user", async () => {
const resp = await listMetrics('users', [otherUser.id], getTs(-500), getTs(0), user);
assert.strictEqual(resp.statusCode, 403);
assert.deepStrictEqual(resp.body, { code: 'AccessDenied', message: 'Access Denied' });
});
it("should return Access Denied for a different account's bucket", async () => {
const resp = await listMetrics('buckets', [otherBucket], getTs(-500), getTs(0), user);
assert.strictEqual(resp.statusCode, 403);
assert.deepStrictEqual(resp.body, { code: 'AccessDenied', message: 'Access Denied' });
});
});
describe('test service user credentials', () => {
it('should list metrics for an account', async () => {
const resp = await listMetrics('accounts', [account.id], getTs(-500), getTs(0), serviceUser);
assert(Array.isArray(resp.body));
const { body } = resp;
assert.deepStrictEqual(body.map(r => r[metricResponseKeys.accounts]), [account.id]);
body.forEach(metric => {
assertMetricResponse(metric, totals.accounts[account.canonicalId]);
});
});
it('should list metrics for a user', async () => {
const resp = await listMetrics('users', [user.id], getTs(-500), getTs(0), serviceUser);
assert(Array.isArray(resp.body));
const { body } = resp;
assert.deepStrictEqual(body.map(r => r[metricResponseKeys.users]), [user.id]);
body.forEach(metric => {
assertMetricResponse(metric, totals.users[user.id]);
});
});
it('should list metrics for a bucket', async () => {
const resp = await listMetrics('buckets', [bucket], getTs(-500), getTs(0), serviceUser);
assert(Array.isArray(resp.body));
const { body } = resp;
assert.deepStrictEqual(body.map(r => r[metricResponseKeys.buckets]), [bucket]);
body.forEach(metric => {
assertMetricResponse(metric, totals.buckets[bucket]);
});
});
});
});

View File

@ -31,6 +31,8 @@ describe('Test UtapiClient', function () {
let sandbox;
let events;
let bucket;
let account;
beforeEach(() => {
sandbox = sinon.createSandbox();
@ -38,8 +40,10 @@ describe('Test UtapiClient', function () {
drainDelay: 5000,
});
account = uuid.v4();
bucket = uuid.v4();
const { events: _events } = generateCustomEvents(now() - (60 * 1000000), now() - (10 * 1000000), 50, {
[uuid.v4()]: { [uuid.v4()]: [uuid.v4()] },
[account]: { [uuid.v4()]: [bucket] },
});
// Hack because you can't unpack to previously declared variables,
// and declaring inside the beforeEach wouldn't have the scope needed
@ -94,6 +98,99 @@ describe('Test UtapiClient', function () {
assert.strictEqual(pushSpy.firstCall.args[0][0].object, undefined);
assert.strictEqual(pushSpy.firstCall.threw(), false);
});
it('should prevent events filtered via deny from being pushed', async () => {
client = new UtapiClient({
drainDelay: 5000,
filter: {
bucket: { deny: new Set([bucket]) },
},
});
const pushSpy = sandbox.spy(UtapiClient.prototype, '_pushToUtapi');
const retrySpy = sandbox.spy(UtapiClient.prototype, '_addToRetryCache');
await client.pushMetric(events[0]);
assert.strictEqual(retrySpy.callCount, 0);
assert.strictEqual(pushSpy.callCount, 0);
});
it('should prevent events filtered via allow from being pushed', async () => {
client = new UtapiClient({
drainDelay: 5000,
filter: {
bucket: { allow: new Set(['not-my-bucket']) },
},
});
const pushSpy = sandbox.spy(UtapiClient.prototype, '_pushToUtapi');
const retrySpy = sandbox.spy(UtapiClient.prototype, '_addToRetryCache');
await client.pushMetric(events[0]);
assert.strictEqual(retrySpy.callCount, 0);
assert.strictEqual(pushSpy.callCount, 0);
});
it('should allow events not matching deny list to be pushed', async () => {
client = new UtapiClient({
drainDelay: 5000,
filter: {
bucket: { deny: new Set(['not-my-bucket']) },
},
});
const pushSpy = sandbox.spy(UtapiClient.prototype, '_pushToUtapi');
const retrySpy = sandbox.spy(UtapiClient.prototype, '_addToRetryCache');
await client.pushMetric(events[0]);
assert.strictEqual(retrySpy.callCount, 0);
assert.strictEqual(pushSpy.callCount, 1);
});
it('should allow events matching allow list to be pushed', async () => {
client = new UtapiClient({
drainDelay: 5000,
filter: {
bucket: { allow: new Set([bucket]) },
},
});
const pushSpy = sandbox.spy(UtapiClient.prototype, '_pushToUtapi');
const retrySpy = sandbox.spy(UtapiClient.prototype, '_addToRetryCache');
await client.pushMetric(events[0]);
assert.strictEqual(retrySpy.callCount, 0);
assert.strictEqual(pushSpy.callCount, 1);
});
it('should combine multiple allow and deny filters', async () => {
client = new UtapiClient({
drainDelay: 5000,
filter: {
bucket: { allow: new Set([bucket]) },
account: { deny: new Set([`${account}-deny`]) },
},
});
const pushSpy = sandbox.spy(UtapiClient.prototype, '_pushToUtapi');
const retrySpy = sandbox.spy(UtapiClient.prototype, '_addToRetryCache');
const [allowedEvent, deniedBucketEvent, deniedAccountEvent] = events;
await client.pushMetric(allowedEvent);
deniedBucketEvent.bucket = `${bucket}-deny`;
await client.pushMetric(deniedBucketEvent);
deniedAccountEvent.account = `${account}-deny`;
await client.pushMetric(deniedAccountEvent);
assert.strictEqual(retrySpy.callCount, 0);
assert.strictEqual(pushSpy.callCount, 1);
});
});

View File

@ -0,0 +1,102 @@
/* eslint-disable no-console */
const assert = require('assert');
const { exec } = require('child_process');
const path = require('path');
const vaultclient = require('../../utils/vaultclient');
const ensureServiceUser = path.resolve(__dirname, '../../../bin/ensureServiceUser');
const expectedPolicyDocument = {
Version: '2012-10-17',
Statement: {
Effect: 'Allow',
Action: 'utapi:ListMetrics',
Resource: 'arn:scality:utapi:::*/*',
},
};
async function execPath(path, args, env) {
const proc = exec(`${path} ${args.join(' ')}`, {
env,
stdio: 'pipe',
});
proc.stdout.on('data', data => console.log(data.toString()));
proc.stderr.on('data', data => console.error(data.toString()));
return new Promise((resolve, reject) => {
proc.on('error', err => reject(err));
proc.on('exit', exitCode => {
if (exitCode !== 0) {
reject(new Error(`ensureServiceUser exited with non-zero code ${exitCode}`));
return;
}
resolve();
});
});
}
// Allow overriding the path to the node binary
// useful to work around issues when running locally and using a node version manager
const NODE_INTERPRETER = process.env.NODE_INTERPRETER ? process.env.NODE_INTERPRETER : 'node';
function executeScript(account) {
return execPath(
NODE_INTERPRETER,
[ensureServiceUser, 'apply', 'service-utapi-user'],
{
AWS_ACCESS_KEY_ID: account.accessKey,
AWS_SECRET_ACCESS_KEY: account.secretKey,
AWS_REGION: 'us-east-1',
NODE_TLS_REJECT_UNAUTHORIZED: '0',
},
);
}
describe('test bin/ensureServiceUser', () => {
let adminAccount;
before(async () => {
adminAccount = await vaultclient.createInternalServiceAccountAndKeys();
});
after(() => vaultclient.cleanupAccountAndUsers(adminAccount));
beforeEach(() => executeScript(adminAccount));
afterEach(() => vaultclient.cleanupUsers(adminAccount));
it('should create the service user and attach a policy', async () => {
const res = await vaultclient.getInternalServiceUserAndPolicies(adminAccount);
assert.strictEqual(res.policies.length, 1);
assert.deepStrictEqual(res.policies[0].document, expectedPolicyDocument);
});
it('should exit with success on subsequent runs', async () => {
const res = await vaultclient.getInternalServiceUserAndPolicies(adminAccount);
assert.strictEqual(res.policies.length, 1);
assert.deepStrictEqual(res.policies[0].document, expectedPolicyDocument);
await executeScript(adminAccount);
const res2 = await vaultclient.getInternalServiceUserAndPolicies(adminAccount);
assert.strictEqual(res2.policies.length, 1);
assert.deepStrictEqual(res2.policies[0].document, expectedPolicyDocument);
});
it('should create and attach a policy if the user already exists', async () => {
const detached = await vaultclient.detachUserPolicies(adminAccount, { name: 'service-utapi-user' });
assert.strictEqual(detached.length, 1);
const client = vaultclient.getIAMClient(adminAccount);
await Promise.all(detached.map(PolicyArn => client.deletePolicy({ PolicyArn }).promise()));
await executeScript(adminAccount);
const res = await vaultclient.getInternalServiceUserAndPolicies(adminAccount);
assert.strictEqual(res.policies.length, 1);
assert.deepStrictEqual(res.policies[0].document, expectedPolicyDocument);
});
it('should not create the policy if it already exists', async () => {
await vaultclient.detachUserPolicies(adminAccount, { name: 'service-utapi-user' });
await executeScript(adminAccount);
const res = await vaultclient.getInternalServiceUserAndPolicies(adminAccount);
assert.strictEqual(res.policies.length, 1);
assert.deepStrictEqual(res.policies[0].document, expectedPolicyDocument);
});
});
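As a usage note, the script is driven entirely through standard AWS environment variables, mirroring executeScript() above. A minimal sketch of a standalone invocation follows; the credentials are placeholders and any endpoint configuration is left to the script's own defaults:
const { execFile } = require('child_process');

// Placeholders only: substitute real admin credentials for the target deployment.
execFile('node', ['bin/ensureServiceUser', 'apply', 'service-utapi-user'], {
    env: {
        ...process.env,
        AWS_ACCESS_KEY_ID: '<admin access key>',
        AWS_SECRET_ACCESS_KEY: '<admin secret key>',
        AWS_REGION: 'us-east-1',
    },
}, (err, stdout, stderr) => {
    if (err) {
        console.error('ensureServiceUser failed:', stderr);
        return;
    }
    console.log(stdout);
});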

View File

@ -0,0 +1,118 @@
const assert = require('assert');
const { filterObject, buildFilterChain } = require('../../../../libV2/utils');
const testCases = [
{
filter: { allow: new Set(['foo', 'bar']) },
value: 'foo',
expected: true,
},
{
filter: { allow: new Set(['foo', 'bar']) },
value: 'baz',
expected: false,
},
{
filter: { deny: new Set(['foo', 'bar']) },
value: 'foo',
expected: false,
},
{
filter: { deny: new Set(['foo', 'bar']) },
value: 'baz',
expected: true,
},
];
describe('Test filterObject', () => {
testCases.forEach(testCase => {
const { value, expected, filter } = testCase;
const successMsg = expected ? 'should not filter' : 'should filter';
const state = (expected && filter.allow) || (!expected && filter.deny) ? '' : ' not';
const ruleType = Object.keys(filter)[0];
const msg = `${successMsg} object if value is${state} present in ${ruleType} list`;
it(msg, () => {
assert.strictEqual(filterObject({ value }, 'value', filter), expected);
});
});
it('should not filter an object if the filtered key is undefined', () => {
assert.strictEqual(filterObject({}, 'value', { allow: new Set(['foo']) }), true);
});
it('should throw if creating a filter with both allow and deny lists', () => {
assert.throws(() => filterObject('value', { allow: new Set(['foo']), deny: new Set(['bar']) }));
});
it('should throw if creating a filter without an allow or deny list', () => {
assert.throws(() => filterObject('value', {}));
});
});
const chainTestCases = [
{
key1: { allow: new Set(['allow']) },
key2: { allow: new Set(['allow']) },
msg: 'test chain with multiple allow filters',
},
{
key1: { deny: new Set(['deny']) },
key2: { deny: new Set(['deny']) },
msg: 'test chain with multiple deny filters',
},
{
key1: { allow: new Set(['allow']) },
key2: { deny: new Set(['deny']) },
msg: 'test chain with an allow and a deny filter',
},
];
describe('Test buildFilterChain', () => {
chainTestCases.forEach(testCase => {
const { key1, key2, msg } = testCase;
describe(msg, () => {
const chain = buildFilterChain({ key1, key2 });
it('should return true when both keys are allow', () => {
assert.strictEqual(chain({ key1: 'allow', key2: 'allow' }), true);
});
it('should return true when key1 is undefined and key2 is allow', () => {
assert.strictEqual(chain({ key2: 'allow' }), true);
});
it('should return true when key1 is allow and key2 is undefined', () => {
assert.strictEqual(chain({ key1: 'allow' }), true);
});
it('should return true when both keys are undefined', () => {
assert.strictEqual(chain({}), true);
});
it('should return false when key1 is deny', () => {
assert.strictEqual(chain({ key1: 'deny', key2: 'allow' }), false);
});
it('should return false when key2 is deny', () => {
assert.strictEqual(chain({ key1: 'allow', key2: 'deny' }), false);
});
it('should return false when both keys are deny', () => {
assert.strictEqual(chain({ key1: 'deny', key2: 'deny' }), false);
});
it('should return false when key1 is undefined and key2 is deny', () => {
assert.strictEqual(chain({ key2: 'deny' }), false);
});
it('should return false when key1 is deny and key2 is undefined', () => {
assert.strictEqual(chain({ key1: 'deny' }), false);
});
});
});
it('should return true if no filters are supplied', () => {
const chain = buildFilterChain({});
assert.strictEqual(chain({}), true);
});
});
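For context, a minimal sketch of the allow/deny contract these unit tests pin down; this is an illustration only, not the actual libV2/utils implementation:
// Hypothetical illustration of the behaviour asserted above; the real helpers live in libV2/utils.
function filterObject(obj, key, { allow, deny }) {
    if (allow && deny) {
        throw new Error('a filter may define an allow list or a deny list, not both');
    }
    if (!allow && !deny) {
        throw new Error('a filter must define an allow list or a deny list');
    }
    const value = obj[key];
    if (value === undefined) {
        return true; // objects missing the filtered key always pass
    }
    return allow ? allow.has(value) : !deny.has(value);
}

function buildFilterChain(filters) {
    const checks = Object.entries(filters)
        .map(([key, filter]) => obj => filterObject(obj, key, filter));
    // An empty chain accepts everything; otherwise every configured filter must pass.
    return obj => checks.every(check => check(obj));
}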

View File

@ -7,6 +7,7 @@ const { models, constants } = require('arsenal');
const { CANONICAL_ID, BUCKET_NAME, OBJECT_KEY } = require('./values');
const { ObjectMD } = models;
const { mpuBucketPrefix } = constants;
class BucketD {
constructor(isV2 = false) {
@ -49,7 +50,12 @@ class BucketD {
const { splitter } = constants;
const entry = {
key: `${CANONICAL_ID}${splitter}${BUCKET_NAME}-${i + 1}`,
value: JSON.stringify({ creationDate: new Date() }),
value: JSON.stringify({
creationDate: new Date(),
name: `${BUCKET_NAME}-${i + 1}`,
owner: CANONICAL_ID,
ownerDisplayName: 'steve',
}),
};
buckets.push(entry);
}
@ -57,6 +63,22 @@ class BucketD {
return this;
}
createBucketsWithOwner(buckets) {
const { splitter } = constants;
this._buckets = buckets.map(
({ name, owner }) => ({
key: `${owner}${splitter}${name}`,
value: JSON.stringify({
creationDate: new Date(),
name,
owner,
ownerDisplayName: 'steve',
}),
}),
);
return this._app;
}
_getUsersBucketResponse(req) {
const body = {
CommonPrefixes: [],
@ -129,14 +151,24 @@ class BucketD {
});
this._app.get('/default/attributes/:bucketName', (req, res) => {
const key = req.params.bucketName;
const bucket = this._bucketContent[key];
const { splitter } = constants;
const { bucketName } = req.params;
let filterKey = bucketName;
if (bucketName.indexOf(mpuBucketPrefix) !== -1) {
filterKey = bucketName.replace(mpuBucketPrefix, '');
}
const bucket = this._buckets
.reduce(
(prev, b) => (
b.key.split(splitter)[1] === filterKey
? JSON.parse(b.value)
: prev),
null,
);
if (bucket) {
res.status(200).send({
name: key,
owner: CANONICAL_ID,
ownerDisplayName: 'steve',
creationDate: new Date(),
...bucket,
name: bucketName,
});
return;
}
@ -157,8 +189,10 @@ class BucketD {
}
end() {
if (this._server !== null) {
this._server.close();
}
}
reset() {
this._bucketCount = 0;
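For illustration, a short, hedged sketch of how a test might seed this mock with explicitly owned buckets; the helper name comes from the hunk above, while the require path and values are placeholders:
const BucketD = require('../utils/mock/BucketD'); // path assumed for illustration

const bucketd = new BucketD(true);
bucketd.createBucketsWithOwner([
    { name: 'utapi-test-bucket', owner: 'canonical-id-1' },
    { name: 'other-bucket', owner: 'canonical-id-2' },
]);
// GET /default/attributes/<bucketName> on the mock now resolves the owner and
// creation date from the seeded entries instead of a single hard-coded canonical ID.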

312
tests/utils/vaultclient.js Normal file
View File

@ -0,0 +1,312 @@
/* eslint-disable no-undef-init */
const { IAM } = require('aws-sdk');
const vaultclient = require('vaultclient');
const fs = require('fs');
const uuid = require('uuid');
const adminCredentials = {
accessKey: 'D4IT2AWSB588GO5J9T00',
secretKey: 'UEEu8tYlsOGGrgf4DAiSZD6apVNPUWqRiPG0nTB6',
};
const internalServiceAccountId = '000000000000';
const internalServiceAccountName = 'scality-internal-services';
const internalServiceAccountEmail = 'scality@internal';
const internalServiceUserName = 'service-utapi-user';
class VaultClient {
/**
* Get endpoint information
*
* @return {object} Vault endpoint information
*/
static getEndpointInformation() {
let host = '127.0.0.1';
let port = 8600;
let ca = undefined;
let cert = undefined;
let key = undefined;
if (process.env.VAULT_ENDPOINT) {
const res = /^https?:\/\/([^:]*)(:[0-9]+)?\/?$/.exec(
process.env.VAULT_ENDPOINT,
);
// eslint-disable-next-line prefer-destructuring
[, host] = res;
if (res[2]) {
port = parseInt(res[2].substring(1), 10);
}
const https = process.env.VAULT_ENDPOINT.startsWith('https://');
if (https) {
ca = fs.readFileSync(process.env.SSL_CA || '/conf/ca.crt',
'ascii');
cert = fs.readFileSync(process.env.SSL_CERT || '/conf/test.crt',
'ascii');
key = fs.readFileSync(process.env.SSL_KEY || '/conf/test.key',
'ascii');
}
}
return {
host,
port,
ca,
cert,
key,
};
}
/**
* Get an admin client
*
* @return {vaultclient.Client} Vault client for admin calls
*/
static getAdminClient() {
const info = VaultClient.getEndpointInformation();
const adminClient = new vaultclient.Client(info.host, info.port,
info.ca !== undefined, undefined, undefined, info.ca, false,
adminCredentials.accessKey, adminCredentials.secretKey);
return adminClient;
}
/**
* Get a service client (the Vault endpoint used by S3)
*
* @return {vaultclient.Client} Vault client for service calls
*/
static getServiceClient() {
const info = VaultClient.getEndpointInformation();
const serviceClient = new vaultclient.Client(info.host, info.port - 100,
info.ca !== undefined, info.key, info.cert, info.ca);
return serviceClient;
}
static getIAMClient(credentials) {
const endpoint = process.env.VAULT_ENDPOINT || 'http://localhost:8600';
const info = {
endpoint,
sslEnabled: false,
region: 'us-east-1',
apiVersion: '2010-05-08',
signatureVersion: 'v4',
accessKeyId: credentials.accessKey,
secretAccessKey: credentials.secretKey,
maxRetries: 0,
};
return new IAM(info);
}
static async createAccount(name) {
const client = VaultClient.getAdminClient();
return new Promise((resolve, reject) =>
client.createAccount(
name,
{ email: `${name}@example.com` },
(err, res) => {
if (err) {
return reject(err);
}
resolve(res.account);
},
));
}
static async createAccountKeys(account) {
const client = VaultClient.getAdminClient();
return new Promise((resolve, reject) =>
client.generateAccountAccessKey(
account.name,
(err, res) => {
if (err) {
return reject(err);
}
return resolve({
accessKey: res.id,
secretKey: res.value,
});
},
));
}
static async createAccountAndKeys(name) {
const account = await VaultClient.createAccount(name);
const creds = await VaultClient.createAccountKeys(account);
return {
...account,
...creds,
};
}
static async createUser(parentAccount, name) {
const client = VaultClient.getIAMClient(parentAccount);
const { User: user } = await client.createUser({ UserName: name }).promise();
return {
name,
id: user.UserId,
arn: user.Arn,
account: user.Arn.split(':')[4],
};
}
static async createUserKeys(parentAccount, name) {
const client = VaultClient.getIAMClient(parentAccount);
const { AccessKey: creds } = await client.createAccessKey({ UserName: name }).promise();
return {
accessKey: creds.AccessKeyId,
secretKey: creds.SecretAccessKey,
};
}
static async createUserAndKeys(parentAccount, name) {
const user = await VaultClient.createUser(parentAccount, name);
const creds = await VaultClient.createUserKeys(parentAccount, name);
return {
...user,
...creds,
};
}
static templateUtapiPolicy(level, resource) {
return JSON.stringify({
Version: '2012-10-17',
Statement: [
{
Sid: `utapiMetrics-${uuid.v4()}`.replace(/-/g, ''),
Action: ['utapi:ListMetrics'],
Effect: 'Allow',
Resource: `arn:scality:utapi:::${level}/${resource}`,
},
],
});
}
static async createAndAttachUtapiPolicy(parentAccount, user, level, resource) {
const client = VaultClient.getIAMClient(parentAccount);
const PolicyDocument = VaultClient.templateUtapiPolicy(level, resource);
const PolicyName = `utapi-test-policy-${uuid.v4()}`;
const res = await client.createPolicy({ PolicyName, PolicyDocument }).promise();
const { Arn: PolicyArn } = res.Policy;
await client.attachUserPolicy({ PolicyArn, UserName: user.name }).promise();
}
static async createInternalServiceAccount() {
const client = VaultClient.getAdminClient();
return new Promise((resolve, reject) =>
client.createAccount(
internalServiceAccountName,
{
email: internalServiceAccountEmail,
externalAccountId: internalServiceAccountId,
disableSeed: true,
},
(err, res) => {
if (err) {
return reject(err);
}
return resolve(res.account);
},
));
}
static async createInternalServiceAccountAndKeys() {
const account = await VaultClient.createInternalServiceAccount();
const creds = await VaultClient.createAccountKeys(account);
return {
...account,
...creds,
};
}
static async getUserByName(parentAccount, name) {
const client = VaultClient.getIAMClient(parentAccount);
const { User: user } = await client.getUser({ UserName: name }).promise();
return {
name,
id: user.UserId,
arn: user.Arn,
account: user.Arn.split(':')[4],
};
}
static async getAttachedPolicies(parentAccount, user) {
const client = VaultClient.getIAMClient(parentAccount);
const res = await client.listAttachedUserPolicies({ UserName: user.name }).promise();
const { AttachedPolicies: attached } = res;
const policies = await Promise.all(
attached.map(
({ PolicyArn }) => client.getPolicyVersion({ PolicyArn, VersionId: 'v1' })
.promise()
.then(({ PolicyVersion }) => ({
arn: PolicyArn,
document: JSON.parse(decodeURIComponent(PolicyVersion.Document)),
})),
),
);
return policies;
}
static async getInternalServiceUserAndPolicies(parentAccount) {
const user = await VaultClient.getUserByName(parentAccount, internalServiceUserName);
const policies = await VaultClient.getAttachedPolicies(parentAccount, user);
return {
...user,
policies,
};
}
static async getAccountUsers(parentAccount) {
const client = VaultClient.getIAMClient(parentAccount);
const { Users } = await client.listUsers({}).promise();
return Users.map(user => ({
arn: user.Arn,
id: user.UserId,
name: user.UserName,
}));
}
static async detachUserPolicies(parentAccount, user) {
const client = VaultClient.getIAMClient(parentAccount);
const policies = await VaultClient.getAttachedPolicies(parentAccount, user);
return Promise.all(
policies.map(policy => client.detachUserPolicy({
PolicyArn: policy.arn,
UserName: user.name,
}).promise().then(() => policy.arn)),
);
}
static async deleteAccount(account) {
return new Promise(
(resolve, reject) => VaultClient
.getAdminClient()
.deleteAccount(
account.name,
(err, res) => {
if (err) {
reject(err);
return;
}
resolve(res);
},
),
);
}
static async cleanupUsers(parentAccount) {
const client = VaultClient.getIAMClient(parentAccount);
const users = await VaultClient.getAccountUsers(parentAccount);
await Promise.all(
users.map(async user => {
const detached = await VaultClient.detachUserPolicies(parentAccount, user);
await Promise.all(detached.map(PolicyArn => client.deletePolicy({ PolicyArn }).promise()));
await client.deleteUser({ UserName: user.name }).promise();
}),
);
}
static async cleanupAccountAndUsers(parentAccount) {
await VaultClient.cleanupUsers(parentAccount);
await VaultClient.deleteAccount(parentAccount);
}
}
module.exports = VaultClient;
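To round out the picture, a short sketch of how the functional tests are expected to drive this helper end to end; the account, user, and resource names are placeholders, and the utapi level/resource pair is only illustrative:
const vaultclient = require('./vaultclient'); // i.e. tests/utils/vaultclient.js

async function seedAndCleanup() {
    // Create a throwaway account with keys, a child user, and a utapi policy...
    const account = await vaultclient.createAccountAndKeys(`utapi-test-${Date.now()}`);
    const user = await vaultclient.createUserAndKeys(account, 'utapi-test-user');
    await vaultclient.createAndAttachUtapiPolicy(account, user, 'buckets', 'utapi-test-bucket');
    // ...then detach policies, delete users, and remove the account when done.
    await vaultclient.cleanupAccountAndUsers(account);
}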

347
yarn.lock
View File

@ -380,6 +380,11 @@
resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.5.tgz#b14efa8852b7768d898906613c23f688713e02cd"
integrity sha512-Q1y515GcOdTHgagaVFhHnIFQ38ygs/kmxdNpvpou+raI9UO3YZcHDngBSYKQklcKlvA7iuQlmIKbzvmxcOE9CQ==
"@types/emscripten@^1.38.0":
version "1.39.5"
resolved "https://registry.yarnpkg.com/@types/emscripten/-/emscripten-1.39.5.tgz#eb0fb1048301df980b6f8a5ec3d63f7d1572bb73"
integrity sha512-DIOOg+POSrYl+OlNRHQuIEqCd8DCtynG57H862UCce16nXJX7J8eWxNGgOcf8Eyge8zXeSs27mz1UcFu8L/L7g==
"@types/graphlib@^2.1.7":
version "2.1.7"
resolved "https://registry.yarnpkg.com/@types/graphlib/-/graphlib-2.1.7.tgz#e6a47a4f43511f5bad30058a669ce5ce93bfd823"
@ -439,11 +444,43 @@
resolved "https://registry.yarnpkg.com/@types/semver/-/semver-5.5.0.tgz#146c2a29ee7d3bae4bf2fcb274636e264c813c45"
integrity sha512-41qEJgBH/TWgo5NFSvBCJ1qkoi3Q6ONSF2avrHq1LVEZfYpdHmj0y9SuTK+u9ZhG1sYQKBL1AWXKyLWP4RaUoQ==
"@yarnpkg/fslib@2.4.0", "@yarnpkg/fslib@^2.1.0":
version "2.4.0"
resolved "https://registry.yarnpkg.com/@yarnpkg/fslib/-/fslib-2.4.0.tgz#a265b737cd089ef293ad964e06c143f5efd411a9"
integrity sha512-CwffYY9owtl3uImNOn1K4jl5iIb/L16a9UZ9Q3lkBARk6tlUsPrNFX00eoUlFcLn49TTfd3zdN6higloGCyncw==
dependencies:
"@yarnpkg/libzip" "^2.2.1"
tslib "^1.13.0"
"@yarnpkg/json-proxy@2.1.0":
version "2.1.0"
resolved "https://registry.yarnpkg.com/@yarnpkg/json-proxy/-/json-proxy-2.1.0.tgz#362a161678cd7dda74b47b4fc848a2f1730d16cd"
integrity sha512-rOgCg2DkyviLgr80mUMTt9vzdf5RGOujQB26yPiXjlz4WNePLBshKlTNG9rKSoKQSOYEQcw6cUmosfOKDatrCw==
dependencies:
"@yarnpkg/fslib" "^2.1.0"
tslib "^1.13.0"
"@yarnpkg/libzip@2.2.1", "@yarnpkg/libzip@^2.2.1":
version "2.2.1"
resolved "https://registry.yarnpkg.com/@yarnpkg/libzip/-/libzip-2.2.1.tgz#61c9b8b2499ee6bd9c4fcbf8248f68e07bd89948"
integrity sha512-AYDJXrkzayoDd3ZlVgFJ+LyDX+Zj/cki3vxIpcYxejtgkl3aquVWOxlC0DD9WboBWsJFIP1MjrUbchLyh++/7A==
dependencies:
"@types/emscripten" "^1.38.0"
tslib "^1.13.0"
"@yarnpkg/lockfile@^1.1.0":
version "1.1.0"
resolved "https://registry.yarnpkg.com/@yarnpkg/lockfile/-/lockfile-1.1.0.tgz#e77a97fbd345b76d83245edcd17d393b1b41fb31"
integrity sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ==
"@yarnpkg/parsers@2.3.0":
version "2.3.0"
resolved "https://registry.yarnpkg.com/@yarnpkg/parsers/-/parsers-2.3.0.tgz#7b9564c6df02f4921d5cfe8287c4b648e93ea84b"
integrity sha512-qgz0QUgOvnhtF92kaluIhIIKBUHlYlHUBQxqh5v9+sxEQvUeF6G6PKiFlzo3E6O99XwvNEGpVu1xZPoSGyGscQ==
dependencies:
js-yaml "^3.10.0"
tslib "^1.13.0"
JSONStream@^1.0.0:
version "1.3.5"
resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.5.tgz#3208c1f08d3a4d99261ab64f92302bc15e111ca0"
@ -552,6 +589,14 @@ agentkeepalive@^4.1.3:
depd "^1.1.2"
humanize-ms "^1.2.1"
ajv@4.10.0:
version "4.10.0"
resolved "https://registry.yarnpkg.com/ajv/-/ajv-4.10.0.tgz#7ae6169180eb199192a8b9a19fd0f47fc9ac8764"
integrity sha1-euYWkYDrGZGSqLmhn9D0f8msh2Q=
dependencies:
co "^4.6.0"
json-stable-stringify "^1.0.1"
ajv@6.12.2:
version "6.12.2"
resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.2.tgz#c629c5eced17baf314437918d2da88c99d5958cd"
@ -665,31 +710,6 @@ arraybuffer.slice@~0.0.7:
resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz#3bbc4275dd584cc1b10809b89d4e8b63a69e7675"
integrity sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog==
arsenal@scality/Arsenal#580e25a:
version "7.5.0"
resolved "https://codeload.github.com/scality/Arsenal/tar.gz/580e25a9e84acf2a2a8dec6c83f69f862804d85f"
dependencies:
"@hapi/joi" "^15.1.0"
JSONStream "^1.0.0"
ajv "6.12.2"
async "~2.1.5"
debug "~2.3.3"
diskusage "^1.1.1"
ioredis "4.9.5"
ipaddr.js "1.9.1"
level "~5.0.1"
level-sublevel "~6.6.5"
node-forge "^0.7.1"
simple-glob "^0.2"
socket.io "~2.3.0"
socket.io-client "~2.3.0"
utf8 "2.1.2"
uuid "^3.0.1"
werelogs scality/werelogs#0ff7ec82
xml2js "~0.4.23"
optionalDependencies:
ioctl "2.0.0"
arsenal@scality/Arsenal#65966f5:
version "7.7.0"
resolved "https://codeload.github.com/scality/Arsenal/tar.gz/65966f5ddf93b048906d14a8c26056abfd4c22ba"
@ -716,13 +736,41 @@ arsenal@scality/Arsenal#65966f5:
optionalDependencies:
ioctl "2.0.0"
arsenal@scality/Arsenal#8ed8478:
version "8.2.1"
resolved "https://codeload.github.com/scality/Arsenal/tar.gz/8ed84786fce31f603b1e8cd641b3b44b8f715b0b"
arsenal@scality/Arsenal#7.10.1:
version "7.10.1"
resolved "https://codeload.github.com/scality/Arsenal/tar.gz/ce7bba1f8d8b4078335f65e8eaa49f6454ddbf46"
dependencies:
"@hapi/joi" "^15.1.0"
JSONStream "^1.0.0"
agentkeepalive "^4.1.3"
ajv "6.12.2"
async "~2.1.5"
base-x "3.0.8"
base62 "2.0.1"
debug "~2.6.9"
diskusage "^1.1.1"
ioredis "4.9.5"
ipaddr.js "1.9.1"
level "~5.0.1"
level-sublevel "~6.6.5"
node-forge "^0.7.1"
prom-client "10.2.3"
simple-glob "^0.2"
socket.io "~2.3.0"
socket.io-client "~2.3.0"
utf8 "2.1.2"
uuid "^3.0.1"
werelogs scality/werelogs#8.1.0
xml2js "~0.4.23"
optionalDependencies:
ioctl "2.0.0"
arsenal@scality/Arsenal#c57cde8:
version "8.1.4"
resolved "https://codeload.github.com/scality/Arsenal/tar.gz/c57cde88bb04fe9803ec08c3a883f6eb986e4149"
dependencies:
JSONStream "^1.0.0"
ajv "4.10.0"
async "~2.6.1"
aws-sdk "2.80.0"
azure-storage "^2.1.0"
@ -734,20 +782,21 @@ arsenal@scality/Arsenal#8ed8478:
hdclient scality/hdclient#5145e04e5ed33e85106765b1caa90cd245ef482b
https-proxy-agent "^2.2.0"
ioredis "4.9.5"
ipaddr.js "1.9.1"
ipaddr.js "1.8.1"
joi "^14.3.0"
level "~5.0.1"
level-sublevel "~6.6.5"
mongodb "^3.0.1"
node-forge "^0.7.1"
prom-client "10.2.3"
simple-glob "^0.2.0"
socket.io "~2.3.0"
socket.io-client "~2.3.0"
sproxydclient "github:scality/sproxydclient#30e7115"
socket.io "~2.2.0"
socket.io-client "~2.2.0"
sproxydclient "github:scality/sproxydclient#a6ec980"
utf8 "3.0.0"
uuid "^3.0.1"
werelogs scality/werelogs#0ff7ec82
xml2js "~0.4.23"
xml2js "~0.4.16"
optionalDependencies:
ioctl "2.0.1"
@ -834,6 +883,21 @@ aws-sdk@2.80.0:
xml2js "0.4.17"
xmlbuilder "4.2.1"
aws-sdk@^2.1005.0:
version "2.1005.0"
resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1005.0.tgz#963e9b5ddda982d7f2f1e59baf88a442dd5da9f8"
integrity sha512-S9IZLTEOTiV/juMOHvZdd6cpxRzmUYhFhJMLUGXIrAku9uZHsdVa3dDdgcbBsDmRZ4WAvlZlD+NvtTNwFCLraw==
dependencies:
buffer "4.9.2"
events "1.1.1"
ieee754 "1.1.13"
jmespath "0.15.0"
querystring "0.2.0"
sax "1.2.1"
url "0.10.3"
uuid "3.3.2"
xml2js "0.4.19"
aws-sign2@~0.7.0:
version "0.7.0"
resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8"
@ -876,6 +940,18 @@ balanced-match@^1.0.0:
resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767"
integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c=
base-x@3.0.8:
version "3.0.8"
resolved "https://registry.yarnpkg.com/base-x/-/base-x-3.0.8.tgz#1e1106c2537f0162e8b52474a557ebb09000018d"
integrity sha512-Rl/1AWP4J/zRrk54hhlxH4drNxPJXYUaKffODVI53/dAsV4t9fBxyxYKAVPU1XBHxYwOWP9h9H0hM2MVw4YfJA==
dependencies:
safe-buffer "^5.0.1"
base62@2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/base62/-/base62-2.0.1.tgz#729cfe179ed34c61e4a489490105b44ce4ea1197"
integrity sha512-4t4WQK7mdbcWzqEBiq6tfo2qDCeIZGXvjifJZyxHIVcjQkZJxpFtu/pa2Va69OouCkg6izZ08hKnPxroeDyzew==
base64-arraybuffer@0.1.4:
version "0.1.4"
resolved "https://registry.yarnpkg.com/base64-arraybuffer/-/base64-arraybuffer-0.1.4.tgz#9818c79e059b1355f97e0428a017c838e90ba812"
@ -891,6 +967,11 @@ base64-js@^1.0.2:
resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.1.tgz#58ece8cb75dd07e71ed08c736abc5fac4dbf8df1"
integrity sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g==
base64id@1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/base64id/-/base64id-1.0.0.tgz#47688cb99bb6804f0e06d3e763b1c32e57d8e6b6"
integrity sha1-R2iMuZu2gE8OBtPnY7HDLlfY5rY=
base64id@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/base64id/-/base64id-2.0.0.tgz#2770ac6bc47d312af97a8bf9a634342e0cd25cb6"
@ -1036,7 +1117,7 @@ bucketclient@scality/bucketclient:
resolved "https://codeload.github.com/scality/bucketclient/tar.gz/97fe75a58b22e256856ea50fad72ca22a94d7096"
dependencies:
agentkeepalive "^4.1.3"
arsenal scality/Arsenal#8ed8478
arsenal scality/Arsenal#c57cde8
werelogs scality/werelogs#351a2a3
buffer-equal-constant-time@1.0.1:
@ -1058,6 +1139,15 @@ buffer@4.9.1:
ieee754 "^1.1.4"
isarray "^1.0.0"
buffer@4.9.2:
version "4.9.2"
resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8"
integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==
dependencies:
base64-js "^1.0.2"
ieee754 "^1.1.4"
isarray "^1.0.0"
buffer@^5.1.0, buffer@^5.5.0, buffer@^5.6.0:
version "5.6.0"
resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.6.0.tgz#a31749dc7d81d84db08abf937b6b8c4033f62786"
@ -1554,13 +1644,6 @@ debug@^4.3.1:
dependencies:
ms "2.1.2"
debug@~2.3.3:
version "2.3.3"
resolved "https://registry.yarnpkg.com/debug/-/debug-2.3.3.tgz#40c453e67e6e13c901ddec317af8986cda9eff8c"
integrity sha1-QMRT5n5uE8kB3ewxeviYbNqe/4w=
dependencies:
ms "0.7.2"
debug@~4.1.0:
version "4.1.1"
resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791"
@ -1812,6 +1895,23 @@ end-of-stream@^1.0.0, end-of-stream@^1.1.0, end-of-stream@^1.4.1:
dependencies:
once "^1.4.0"
engine.io-client@~3.3.1:
version "3.3.3"
resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-3.3.3.tgz#aeb45695ced81b787a8a10c92b0bc226b1cb3c53"
integrity sha512-PXIgpzb1brtBzh8Q6vCjzCMeu4nfEPmaDm+L3Qb2sVHwLkxC1qRiBMSjOB0NJNjZ0hbPNUKQa+s8J2XxLOIEeQ==
dependencies:
component-emitter "1.2.1"
component-inherit "0.0.3"
debug "~3.1.0"
engine.io-parser "~2.1.1"
has-cors "1.1.0"
indexof "0.0.1"
parseqs "0.0.5"
parseuri "0.0.5"
ws "~6.1.0"
xmlhttprequest-ssl "~1.6.3"
yeast "0.1.2"
engine.io-client@~3.4.0:
version "3.4.4"
resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-3.4.4.tgz#77d8003f502b0782dd792b073a4d2cf7ca5ab967"
@ -1829,6 +1929,17 @@ engine.io-client@~3.4.0:
xmlhttprequest-ssl "~1.5.4"
yeast "0.1.2"
engine.io-parser@~2.1.0, engine.io-parser@~2.1.1:
version "2.1.3"
resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-2.1.3.tgz#757ab970fbf2dfb32c7b74b033216d5739ef79a6"
integrity sha512-6HXPre2O4Houl7c4g7Ic/XzPnHBvaEmN90vtRO9uLmwtRqQmTOw0QMevL1TOfL2Cpu1VzsaTmMotQgMdkzGkVA==
dependencies:
after "0.8.2"
arraybuffer.slice "~0.0.7"
base64-arraybuffer "0.1.5"
blob "0.0.5"
has-binary2 "~1.0.2"
engine.io-parser@~2.2.0:
version "2.2.1"
resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-2.2.1.tgz#57ce5611d9370ee94f99641b589f94c97e4f5da7"
@ -1840,6 +1951,18 @@ engine.io-parser@~2.2.0:
blob "0.0.5"
has-binary2 "~1.0.2"
engine.io@~3.3.1:
version "3.3.2"
resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-3.3.2.tgz#18cbc8b6f36e9461c5c0f81df2b830de16058a59"
integrity sha512-AsaA9KG7cWPXWHp5FvHdDWY3AMWeZ8x+2pUVLcn71qE5AtAzgGbxuclOytygskw8XGmiQafTmnI9Bix3uihu2w==
dependencies:
accepts "~1.3.4"
base64id "1.0.0"
cookie "0.3.1"
debug "~3.1.0"
engine.io-parser "~2.1.0"
ws "~6.1.0"
engine.io@~3.4.0:
version "3.4.2"
resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-3.4.2.tgz#8fc84ee00388e3e228645e0a7d3dfaeed5bd122c"
@ -2129,6 +2252,11 @@ event-loop-spinner@^2.0.0:
dependencies:
tslib "^1.10.0"
events@1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/events/-/events-1.1.1.tgz#9ebdb7635ad099c70dcc4c2a1f5004288e8bd924"
integrity sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=
execa@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8"
@ -2619,6 +2747,11 @@ he@1.1.1:
resolved "https://registry.yarnpkg.com/he/-/he-1.1.1.tgz#93410fd21b009735151f8868c2f271f3427e23fd"
integrity sha1-k0EP0hsAlzUVH4howvJx80J+I/0=
hoek@6.x.x:
version "6.1.3"
resolved "https://registry.yarnpkg.com/hoek/-/hoek-6.1.3.tgz#73b7d33952e01fe27a38b0457294b79dd8da242c"
integrity sha512-YXXAAhmF9zpQbC7LEcREFtXfGq5K1fmd+4PHkBq8NUqmzW3G+Dq10bI/i0KucLRwss3YYFQ0fSfoxBZYiGUqtQ==
hosted-git-info@^2.1.4, hosted-git-info@^2.7.1:
version "2.8.8"
resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.8.tgz#7539bd4bc1e0e0a895815a2e0262420b12858488"
@ -2713,7 +2846,7 @@ iconv-lite@0.4.24, iconv-lite@^0.4.24, iconv-lite@^0.4.4:
dependencies:
safer-buffer ">= 2.1.2 < 3"
ieee754@^1.1.4:
ieee754@1.1.13, ieee754@^1.1.4:
version "1.1.13"
resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84"
integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==
@ -2885,6 +3018,11 @@ ip@1.1.5, ip@^1.1.5:
resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a"
integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo=
ipaddr.js@1.8.1:
version "1.8.1"
resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.8.1.tgz#fa4b79fa47fd3def5e3b159825161c0a519c9427"
integrity sha1-+kt5+kf9Pe9eOxWYJRYcClGclCc=
ipaddr.js@1.9.1:
version "1.9.1"
resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3"
@ -3095,6 +3233,13 @@ isarray@^1.0.0, isarray@~1.0.0:
resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=
isemail@3.x.x:
version "3.2.0"
resolved "https://registry.yarnpkg.com/isemail/-/isemail-3.2.0.tgz#59310a021931a9fb06bbb51e155ce0b3f236832c"
integrity sha512-zKqkK+O+dGqevc93KNsbZ/TqTUFd46MwWjYOoMrjIMZ51eU7DtQG3Wmd9SQQT7i7RVnuTPEiYEWHU3MSbxC1Tg==
dependencies:
punycode "2.x.x"
isexe@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
@ -3110,11 +3255,28 @@ jmespath@0.15.0:
resolved "https://registry.yarnpkg.com/jmespath/-/jmespath-0.15.0.tgz#a3f222a9aae9f966f5d27c796510e28091764217"
integrity sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc=
joi@^14.3.0:
version "14.3.1"
resolved "https://registry.yarnpkg.com/joi/-/joi-14.3.1.tgz#164a262ec0b855466e0c35eea2a885ae8b6c703c"
integrity sha512-LQDdM+pkOrpAn4Lp+neNIFV3axv1Vna3j38bisbQhETPMANYRbFJFUyOZcOClYvM/hppMhGWuKSFEK9vjrB+bQ==
dependencies:
hoek "6.x.x"
isemail "3.x.x"
topo "3.x.x"
js-tokens@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==
js-yaml@^3.10.0:
version "3.14.1"
resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537"
integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==
dependencies:
argparse "^1.0.7"
esprima "^4.0.0"
js-yaml@^3.13.0, js-yaml@^3.13.1, js-yaml@^3.14.0:
version "3.14.0"
resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.0.tgz#a7a34170f26a21bb162424d8adacb4113a69e482"
@ -3192,6 +3354,13 @@ json-stable-stringify-without-jsonify@^1.0.1:
resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651"
integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=
json-stable-stringify@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz#9a759d39c5f2ff503fd5300646ed445f88c4f9af"
integrity sha1-mnWdOcXy/1A/1TAGRu1EX4jE+a8=
dependencies:
jsonify "~0.0.0"
json-stringify-safe@~5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb"
@ -3202,6 +3371,11 @@ json3@3.3.2:
resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.2.tgz#3c0434743df93e2f5c42aee7b19bcb483575f4e1"
integrity sha1-PAQ0dD35Pi9cQq7nsZvLSDV19OE=
jsonify@~0.0.0:
version "0.0.0"
resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73"
integrity sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM=
jsonparse@^1.2.0:
version "1.3.1"
resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-1.3.1.tgz#3f4dae4a91fac315f71062f8521cc239f1366280"
@ -3953,11 +4127,6 @@ mpath@~0.5.0:
resolved "https://registry.yarnpkg.com/mpath/-/mpath-0.5.2.tgz#b1eac586dffb5175d2f51ca9aacba35d9940dd41"
integrity sha512-NOeCoW6AYc3hLi30npe7uzbD9b4FQZKH40YKABUCCvaKKL5agj6YzvHoNx8jQpDMNPgIa5bvSZQbQpWBAVD0Kw==
ms@0.7.2:
version "0.7.2"
resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.2.tgz#ae25cf2512b3885a1d95d7f037868d8431124765"
integrity sha1-riXPJRKziFodldfwN4aNhDESR2U=
ms@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
@ -4721,7 +4890,7 @@ punycode@1.3.2:
resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d"
integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=
punycode@^2.1.0, punycode@^2.1.1:
punycode@2.x.x, punycode@^2.1.0, punycode@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec"
integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==
@ -5530,6 +5699,26 @@ socket.io-adapter@~1.1.0:
resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-1.1.2.tgz#ab3f0d6f66b8fc7fca3959ab5991f82221789be9"
integrity sha512-WzZRUj1kUjrTIrUKpZLEzFZ1OLj5FwLlAFQs9kuZJzJi5DKdU7FsWc36SNmA8iDOtwBQyT8FkrriRM8vXLYz8g==
socket.io-client@2.2.0, socket.io-client@~2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-2.2.0.tgz#84e73ee3c43d5020ccc1a258faeeb9aec2723af7"
integrity sha512-56ZrkTDbdTLmBIyfFYesgOxsjcLnwAKoN4CiPyTVkMQj3zTUh0QAx3GbvIvLpFEOvQWu92yyWICxB0u7wkVbYA==
dependencies:
backo2 "1.0.2"
base64-arraybuffer "0.1.5"
component-bind "1.0.0"
component-emitter "1.2.1"
debug "~3.1.0"
engine.io-client "~3.3.1"
has-binary2 "~1.0.2"
has-cors "1.1.0"
indexof "0.0.1"
object-component "0.0.3"
parseqs "0.0.5"
parseuri "0.0.5"
socket.io-parser "~3.3.0"
to-array "0.1.4"
socket.io-client@2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-2.3.0.tgz#14d5ba2e00b9bcd145ae443ab96b3f86cbcc1bb4"
@ -5585,6 +5774,18 @@ socket.io-parser@~3.4.0:
debug "~4.1.0"
isarray "2.0.1"
socket.io@~2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-2.2.0.tgz#f0f633161ef6712c972b307598ecd08c9b1b4d5b"
integrity sha512-wxXrIuZ8AILcn+f1B4ez4hJTPG24iNgxBBDaJfT6MsyOhVYiTXWexGoPkd87ktJG8kQEcL/NBvRi64+9k4Kc0w==
dependencies:
debug "~4.1.0"
engine.io "~3.3.1"
has-binary2 "~1.0.2"
socket.io-adapter "~1.1.0"
socket.io-client "2.2.0"
socket.io-parser "~3.3.0"
socket.io@~2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-2.3.0.tgz#cd762ed6a4faeca59bc1f3e243c0969311eb73fb"
@ -5674,12 +5875,11 @@ sprintf-js@~1.0.2:
resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=
"sproxydclient@github:scality/sproxydclient#30e7115":
version "8.0.2"
resolved "https://codeload.github.com/scality/sproxydclient/tar.gz/30e7115668bc7e10b4ec3cfdbaa7a124cdc21cc5"
"sproxydclient@github:scality/sproxydclient#a6ec980":
version "7.4.0"
resolved "https://codeload.github.com/scality/sproxydclient/tar.gz/a6ec98079fcbfde113de3f3afdcb57835d2ac55f"
dependencies:
async "^3.1.0"
werelogs scality/werelogs#351a2a3
werelogs scality/werelogs#0ff7ec82
ssh2-streams@~0.4.10:
version "0.4.10"
@ -6023,6 +6223,13 @@ toml@^3.0.0:
resolved "https://registry.yarnpkg.com/toml/-/toml-3.0.0.tgz#342160f1af1904ec9d204d03a5d61222d762c5ee"
integrity sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w==
topo@3.x.x:
version "3.0.3"
resolved "https://registry.yarnpkg.com/topo/-/topo-3.0.3.tgz#d5a67fb2e69307ebeeb08402ec2a2a6f5f7ad95c"
integrity sha512-IgpPtvD4kjrJ7CRA3ov2FhWQADwv+Tdqbsf1ZnPUSAtCJ9e1Z44MmoSGDXGk4IppoZA7jd/QRkNddlLJWlUZsQ==
dependencies:
hoek "6.x.x"
touch@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/touch/-/touch-3.1.0.tgz#fe365f5f75ec9ed4e56825e0bb76d24ab74af83b"
@ -6248,6 +6455,11 @@ uuid@3.0.1:
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.0.1.tgz#6544bba2dfda8c1cf17e629a3a305e2bb1fee6c1"
integrity sha1-ZUS7ot/ajBzxfmKaOjBeK7H+5sE=
uuid@3.3.2:
version "3.3.2"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131"
integrity sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==
uuid@^3.0.0, uuid@^3.0.1, uuid@^3.3.2:
version "3.4.0"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee"
@ -6291,14 +6503,14 @@ vary@~1.1.2:
resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc"
integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=
vaultclient@scality/vaultclient#ff9e92f:
version "7.5.1"
resolved "https://codeload.github.com/scality/vaultclient/tar.gz/ff9e92fd8e35c16bbc20f5e4ee0531d8bffedbf2"
vaultclient@scality/vaultclient#9013faab804a7ee61bd8bc4c71de89c3498bb6c0:
version "7.10.1"
resolved "https://codeload.github.com/scality/vaultclient/tar.gz/9013faab804a7ee61bd8bc4c71de89c3498bb6c0"
dependencies:
agentkeepalive "^4.1.3"
arsenal scality/Arsenal#580e25a
arsenal scality/Arsenal#7.10.1
commander "2.20.0"
werelogs scality/werelogs#4e0d97c
werelogs scality/werelogs#8.1.0
xml2js "0.4.19"
verror@1.10.0:
@ -6315,12 +6527,6 @@ vscode-languageserver-types@^3.15.1:
resolved "https://registry.yarnpkg.com/vscode-languageserver-types/-/vscode-languageserver-types-3.15.1.tgz#17be71d78d2f6236d414f0001ce1ef4d23e6b6de"
integrity sha512-+a9MPUQrNGRrGU630OGbYVQ+11iOIovjCkqxajPa9w57Sd5ruK8WQNsslzpa0x/QJqC8kRc2DUxWjIFwoNm4ZQ==
werelogs@scality/werelogs#0a4c576:
version "7.4.1"
resolved "https://codeload.github.com/scality/werelogs/tar.gz/0a4c57658f9cf56838628f3a328cdee5f5b5adc3"
dependencies:
safe-json-stringify "1.0.3"
werelogs@scality/werelogs#0ff7ec82:
version "7.4.0"
resolved "https://codeload.github.com/scality/werelogs/tar.gz/0ff7ec82f0deb1e472d8285fb7cc9ebde72c5f61"
@ -6333,9 +6539,9 @@ werelogs@scality/werelogs#351a2a3:
dependencies:
safe-json-stringify "1.0.3"
werelogs@scality/werelogs#4e0d97c:
version "7.4.1"
resolved "https://codeload.github.com/scality/werelogs/tar.gz/4e0d97cf69ea7ed60bea90756278513e7e7ea9b1"
werelogs@scality/werelogs#8.1.0:
version "8.1.0"
resolved "https://codeload.github.com/scality/werelogs/tar.gz/e8f828725642c54c511cdbe580b18f43d3589313"
dependencies:
safe-json-stringify "1.0.3"
@ -6483,7 +6689,7 @@ xml2js@0.4.19:
sax ">=0.6.0"
xmlbuilder "~9.0.1"
xml2js@0.4.23, xml2js@^0.4.17, xml2js@~0.4.23:
xml2js@0.4.23, xml2js@^0.4.17, xml2js@~0.4.16, xml2js@~0.4.23:
version "0.4.23"
resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.23.tgz#a0c69516752421eb2ac758ee4d4ccf58843eac66"
integrity sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==
@ -6513,6 +6719,11 @@ xmlhttprequest-ssl@~1.5.4:
resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.5.tgz#c2876b06168aadc40e57d97e81191ac8f4398b3e"
integrity sha1-wodrBhaKrcQOV9l+gRkayPQ5iz4=
xmlhttprequest-ssl@~1.6.3:
version "1.6.3"
resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.6.3.tgz#03b713873b01659dfa2c1c5d056065b27ddc2de6"
integrity sha512-3XfeQE/wNkvrIktn2Kf0869fC0BN6UpydVasGIeSm2B1Llihf7/0UfZM+eCkOw3P7bP4+qPgqhm7ZoxuJtFU0Q==
xregexp@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/xregexp/-/xregexp-2.0.0.tgz#52a63e56ca0b84a7f3a5f3d61872f126ad7a5943"