Compare commits
48 Commits
developmen ... add-crr-st
Author | SHA1
---|---
Bennett Buchanan | 00b0f6f771
Bennett Buchanan | 362ff6b562
Bennett Buchanan | 57fbdfa610
Bennett Buchanan | 0db50e10f4
Bennett Buchanan | c955a79367
Rached Ben Mustapha | 7efbe2f04c
LaureVergeron | cf82585949
VR | 05bac591b5
vrancurel | 12a476bdc9
VR | fcc71e14df
vrancurel | 87a9f7502b
Lauren Spiegel | e0dad964f6
Lauren Spiegel | 7aaba13bb5
Lauren Spiegel | 7a22c73cff
Lauren Spiegel | 6b56a80208
Lauren Spiegel | 27c9dcd03b
Lauren Spiegel | 93b67fd5c4
Lauren Spiegel | 291b09605d
JianqinWang | 448d0e870f
Rached Ben Mustapha | f4a412239b
Rached Ben Mustapha | 9f94ecb5af
Rached Ben Mustapha | f8af109038
Rached Ben Mustapha | bafbfc18ad
Rached Ben Mustapha | 0733e3e546
Rached Ben Mustapha | 18fa965576
Rached Ben Mustapha | 8a9925f8da
Rached Ben Mustapha | 73dc856211
Rached Ben Mustapha | be60f5dbc9
Rached Ben Mustapha | 6997397c47
Rached Ben Mustapha | c9c2599c76
Rached Ben Mustapha | 8ed358e86d
Rached Ben Mustapha | 1d775113a4
Rached Ben Mustapha | 1cc15169c9
Rached Ben Mustapha | 9ad59a97c0
Rached Ben Mustapha | 23e2e05b0d
Rached Ben Mustapha | d606630f92
Rached Ben Mustapha | 00582df4d8
Rached Ben Mustapha | d5b64614b8
Rached Ben Mustapha | 90d3ed71d9
Rached Ben Mustapha | cc782944e3
Rached Ben Mustapha | 7481e78a06
Rached Ben Mustapha | a06919f497
Rached Ben Mustapha | 4525f98c3a
Rached Ben Mustapha | a31019d677
Rached Ben Mustapha | d24ea35da3
Rached Ben Mustapha | 80a71561ed
Rached Ben Mustapha | 1550ca9a0f
Rached Ben Mustapha | 7c65aa916e
@@ -0,0 +1,3 @@
+node_modules
+localData/*
+localMetadata/*
@@ -20,4 +20,4 @@ VOLUME ["/usr/src/app/localData","/usr/src/app/localMetadata"]
 ENTRYPOINT ["/usr/src/app/docker-entrypoint.sh"]
 CMD [ "npm", "start" ]
 
-EXPOSE 8000
+EXPOSE 8001
@@ -19,4 +19,4 @@ ENV S3BACKEND mem
 ENTRYPOINT ["/usr/src/app/docker-entrypoint.sh"]
 CMD [ "npm", "start" ]
 
-EXPOSE 8000
+EXPOSE 8001
@@ -0,0 +1,100 @@
+#!/bin/sh
+// 2>/dev/null ; exec "$(which nodejs 2>/dev/null || which node)" "$0" "$@"
+'use strict'; // eslint-disable-line strict
+
+const { auth } = require('arsenal');
+const commander = require('commander');
+
+const http = require('http');
+const https = require('https');
+const logger = require('../lib/utilities/logger');
+
+function _performSearch(host,
+    port,
+    bucketName,
+    query,
+    accessKey,
+    secretKey,
+    verbose, ssl) {
+    const escapedSearch = encodeURIComponent(query);
+    const options = {
+        host,
+        port,
+        method: 'GET',
+        path: `/${bucketName}/?search=${escapedSearch}`,
+        headers: {
+            'Content-Length': 0,
+        },
+        rejectUnauthorized: false,
+    };
+    const transport = ssl ? https : http;
+    const request = transport.request(options, response => {
+        if (verbose) {
+            logger.info('response status code', {
+                statusCode: response.statusCode,
+            });
+            logger.info('response headers', { headers: response.headers });
+        }
+        const body = [];
+        response.setEncoding('utf8');
+        response.on('data', chunk => body.push(chunk));
+        response.on('end', () => {
+            if (response.statusCode >= 200 && response.statusCode < 300) {
+                logger.info('Success');
+                process.stdout.write(body.join(''));
+                process.exit(0);
+            } else {
+                logger.error('request failed with HTTP Status ', {
+                    statusCode: response.statusCode,
+                    body: body.join(''),
+                });
+                process.exit(1);
+            }
+        });
+    });
+    // generateV4Headers expects request object with path that does not
+    // include query
+    request.path = `/${bucketName}`;
+    auth.client.generateV4Headers(request, { search: query },
+        accessKey, secretKey, 's3');
+    request.path = `/${bucketName}?search=${escapedSearch}`;
+    if (verbose) {
+        logger.info('request headers', { headers: request._headers });
+    }
+    request.end();
+}
+
+/**
+ * This function is used as a binary to send a request to S3 to perform a
+ * search on the objects in a bucket
+ *
+ * @return {undefined}
+ */
+function searchBucket() {
+    // TODO: Include other bucket listing possible query params?
+    commander
+        .version('0.0.1')
+        .option('-a, --access-key <accessKey>', 'Access key id')
+        .option('-k, --secret-key <secretKey>', 'Secret access key')
+        .option('-b, --bucket <bucket>', 'Name of the bucket')
+        .option('-q, --query <query>', 'Search query')
+        .option('-h, --host <host>', 'Host of the server')
+        .option('-p, --port <port>', 'Port of the server')
+        .option('-s', '--ssl', 'Enable ssl')
+        .option('-v, --verbose')
+        .parse(process.argv);
+
+    const { host, port, accessKey, secretKey, bucket, query, verbose, ssl } =
+        commander;
+
+    if (!host || !port || !accessKey || !secretKey || !bucket || !query) {
+        logger.error('missing parameter');
+        commander.outputHelp();
+        process.exit(1);
+    }
+
+    _performSearch(host, port, bucket, query, accessKey, secretKey, verbose,
+        ssl);
+}
+
+searchBucket();
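For reference, the new search tool can be invoked along these lines (key, bucket, and file path are illustrative assumptions, since the diff does not show the file name; the flags come from the commander options above):

    node bin/search_bucket.js -a accessKey1 -k verySecretKey1 -b bucketname \
        -q '`x-amz-meta-color`="blue"' -h 127.0.0.1 -p 8000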
@@ -19,5 +19,16 @@
             "access": "accessKey2",
             "secret": "verySecretKey2"
         }]
+    },
+    {
+        "name": "Clueso",
+        "email": "inspector@clueso.info",
+        "arn": "arn:aws:iam::123456789014:root",
+        "canonicalID": "http://acs.zenko.io/accounts/service/clueso",
+        "shortid": "123456789014",
+        "keys": [{
+            "access": "cluesoKey1",
+            "secret": "cluesoSecretKey1"
+        }]
     }]
 }
config.json (23 changes)
@@ -1,5 +1,5 @@
 {
-    "port": 8000,
+    "port": 8001,
     "listenOn": [],
     "replicationGroupId": "RG001",
     "restEndpoints": {
@@ -8,7 +8,9 @@
         "cloudserver-front": "us-east-1",
         "s3.docker.test": "us-east-1",
         "127.0.0.2": "us-east-1",
-        "s3.amazonaws.com": "us-east-1"
+        "s3.amazonaws.com": "us-east-1",
+        "zenko-cloudserver-replicator": "us-east-1",
+        "lb": "us-east-1"
     },
     "websiteEndpoints": ["s3-website-us-east-1.amazonaws.com",
         "s3-website.us-east-2.amazonaws.com",
@@ -45,37 +47,36 @@
         "host": "localhost",
         "port": 8500
     },
-    "clusters": 10,
+    "clusters": 1,
     "log": {
         "logLevel": "info",
         "dumpLevel": "error"
     },
     "healthChecks": {
-        "allowFrom": ["127.0.0.1/8", "::1"]
+        "allowFrom": ["127.0.0.1/8", "::1", "0.0.0.0/0"]
     },
     "metadataClient": {
         "host": "localhost",
-        "port": 9990
+        "port": 9993
     },
     "dataClient": {
         "host": "localhost",
-        "port": 9991
+        "port": 9992
     },
     "metadataDaemon": {
         "bindAddress": "localhost",
-        "port": 9990
+        "port": 9993
    },
     "dataDaemon": {
         "bindAddress": "localhost",
-        "port": 9991
+        "port": 9992
     },
     "recordLog": {
-        "enabled": false,
+        "enabled": true,
         "recordLogName": "s3-recordlog"
     },
     "mongodb": {
-        "host": "localhost",
-        "port": 27018,
+        "hosts": "localhost:27018",
         "writeConcern": "majority",
         "replicaSet": "",
         "readPreference": "primary",
@@ -115,7 +115,7 @@ const constants = {
     // for external backends, don't call unless at least 1 minute
     // (60,000 milliseconds) since last call
     externalBackendHealthCheckInterval: 60000,
-    versioningNotImplBackends: { azure: true },
+    versioningNotImplBackends: {},
     mpuMDStoredExternallyBackend: { aws_s3: true },
     /* eslint-enable camelcase */
     mpuMDStoredOnS3Backend: { azure: true },
@@ -12,7 +12,8 @@ if (config.backends.data === 'file' ||
         port: config.dataDaemon.port,
         dataStore: new arsenal.storage.data.file.DataFileStore(
             { dataPath: config.dataDaemon.dataPath,
-              log: config.log }),
+              log: config.log,
+              noSync: true }),
         log: config.log });
     dataServer.setup(err => {
         if (err) {
@@ -81,13 +81,8 @@ if [[ "$METADATA_HOST" ]]; then
    JQ_FILTERS_CONFIG="$JQ_FILTERS_CONFIG | .metadataClient.host=\"$METADATA_HOST\""
 fi
 
-if [[ "$MONGODB_HOST" ]]; then
-    JQ_FILTERS_CONFIG="$JQ_FILTERS_CONFIG | .mongodb.host=\"$MONGODB_HOST\""
-    JQ_FILTERS_CONFIG="$JQ_FILTERS_CONFIG | .mongodb.port=27017"
-fi
-
-if [[ "$MONGODB_PORT" ]]; then
-    JQ_FILTERS_CONFIG="$JQ_FILTERS_CONFIG | .mongodb.port=$MONGODB_PORT"
+if [[ "$MONGODB_HOSTS" ]]; then
+    JQ_FILTERS_CONFIG="$JQ_FILTERS_CONFIG | .mongodb.hosts=\"$MONGODB_HOSTS\""
 fi
 
 if [[ "$MONGODB_RS" ]]; then
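For example (hostnames hypothetical; whether several replica-set members can be listed in the one string is an assumption, the diff itself only shows a single host:port value), the container would now be configured with:

    MONGODB_HOSTS="mongo1:27018,mongo2:27018,mongo3:27018"

replacing the separate MONGODB_HOST/MONGODB_PORT variables the old code consumed.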
@@ -112,6 +107,10 @@ if [[ $JQ_FILTERS_CONFIG != "." ]]; then
     mv config.json.tmp config.json
 fi
 
+if test -v INITIAL_INSTANCE_ID && test -v S3METADATAPATH && ! test -f ${S3METADATAPATH}/uuid ; then
+    echo -n ${INITIAL_INSTANCE_ID} > ${S3METADATAPATH}/uuid
+fi
+
 # s3 secret credentials for Zenko
 if [ -r /run/secrets/s3-credentials ] ; then
     . /run/secrets/s3-credentials
@@ -820,6 +820,7 @@ class Config extends EventEmitter {
             process.env.REPORT_TOKEN ||
             config.reportToken ||
             uuid.v4().toString();
+        this.reportEndpoint = process.env.REPORT_ENDPOINT;
     }
 
     _configureBackends() {
@@ -827,7 +828,7 @@ class Config extends EventEmitter {
          * Configure the backends for Authentication, Data and Metadata.
          */
         let auth = 'mem';
-        let data = 'file';
+        let data = 'multiple';
         let metadata = 'file';
         let kms = 'file';
         if (process.env.S3BACKEND) {
@@ -843,6 +844,9 @@ class Config extends EventEmitter {
         }
         if (process.env.S3VAULT) {
             auth = process.env.S3VAULT;
+            this.authData = buildAuthDataAccount(
+                '7D7XKUZJBB1XFL6GZQVJ',
+                'uLVVkOUr1=zsf6gRyC+ZvdjNJF15un8wlDBlCuFW');
         }
         if (auth === 'file' || auth === 'mem' || auth === 'cdmi') {
             // Auth only checks for 'mem' since mem === file
@@ -94,7 +94,9 @@ function isObjAuthorized(bucket, objectMD, requestType, canonicalID) {
 
     // User is already authorized on the bucket for FULL_CONTROL or WRITE or
     // bucket has canned ACL public-read-write
-    if (requestType === 'objectPut' || requestType === 'objectDelete') {
+    if (requestType === 'objectPut' || requestType === 'objectDelete' ||
+        requestType === 'ReplicateObject') {
+        // TODO remove ReplicateObject
         return true;
     }
 
@@ -0,0 +1,77 @@
+
+/*
+    This code is based on code from https://github.com/olehch/sqltomongo
+    with the following license:
+
+    The MIT License (MIT)
+
+    Copyright (c) 2016 Oleh
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in all
+    copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+    SOFTWARE.
+*/
+
+/**
+ * A helper object to map SQL-like naming to MongoDB query syntax
+ */
+const exprMapper = {
+    '=': '$eq',
+    '<>': '$ne',
+    '>': '$gt',
+    '<': '$lt',
+    '>=': '$gte',
+    '<=': '$lte',
+    'LIKE': '$regex',
+};
+
+/*
+ * Parses object with WHERE clause recursively
+ * and generates MongoDB `find` query object
+ */
+function parseWhere(root) {
+    const operator = Object.keys(root)[0];
+
+    // extract leaf binary expressions
+    if (operator === 'AND') {
+        const e1 = parseWhere(root[operator][0]);
+        const e2 = parseWhere(root[operator][1]);
+
+        // eslint-disable-next-line
+        return { '$and' : [
+            e1,
+            e2,
+        ] };
+    } else if (operator === 'OR') {
+        const e1 = parseWhere(root[operator][0]);
+        const e2 = parseWhere(root[operator][1]);
+
+        // eslint-disable-next-line
+        return { '$or' : [
+            e1,
+            e2,
+        ] };
+    }
+    const field = root[operator][0];
+    const expr = exprMapper[operator];
+    const obj = {};
+    obj[`value.${field}`] = { [expr]: root[operator][1] };
+
+    return obj;
+}
+
+module.exports = parseWhere;
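To make the mapping concrete, here is a sketch (not part of the diff) of the query object parseWhere builds for a compound clause; the input shape matches the AST that sql-where-parser produces in the validateSearch module below:

    // WHERE `content-length` = 5 AND `x-amz-meta-color` <> "red"
    const ast = { AND: [
        { '=': ['content-length', 5] },
        { '<>': ['x-amz-meta-color', 'red'] },
    ] };
    parseWhere(ast);
    // => { $and: [{ 'value.content-length': { $eq: 5 } },
    //             { 'value.x-amz-meta-color': { $ne: 'red' } }] }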
@@ -0,0 +1,64 @@
+const Parser = require('sql-where-parser');
+const parser = new Parser();
+const { errors } = require('arsenal');
+const objModel = require('arsenal').models.ObjectMD;
+
+function _validateTree(whereClause, possibleAttributes) {
+    let invalidAttribute;
+
+    function _searchTree(node) {
+        const operator = Object.keys(node)[0];
+
+        if (operator === 'AND') {
+            _searchTree(node[operator][0]);
+            _searchTree(node[operator][1]);
+        } else if (operator === 'OR') {
+            _searchTree(node[operator][0]);
+            _searchTree(node[operator][1]);
+        } else {
+            const field = node[operator][0];
+
+            if (!possibleAttributes[field] &&
+                !field.startsWith('x-amz-meta-')) {
+                invalidAttribute = field;
+            }
+        }
+    }
+    _searchTree(whereClause);
+    return invalidAttribute;
+}
+
+/**
+ * validateSearchParams - validate value of ?search= in request
+ * @param {string} searchParams - value of search params in request
+ * which should be just a sql where clause
+ * For metadata: userMd.`x-amz-meta-color`=\"blue\"
+ * For tags: tags.`x-amz-meta-color`=\"blue\"
+ * For any other attribute: `content-length`=5
+ * @return {undefined | error} undefined if validates or arsenal error if not
+ */
+function validateSearchParams(searchParams) {
+    let ast;
+    try {
+        ast = parser.parse(searchParams);
+    } catch (e) {
+        if (e) {
+            return errors.InvalidArgument
+                .customizeDescription('Invalid sql where clause ' +
+                    'sent as search query');
+        }
+    }
+    const possibleAttributes = objModel.getAttributes();
+    const invalidAttribute = _validateTree(ast, possibleAttributes);
+    if (invalidAttribute) {
+        return {
+            error: errors.InvalidArgument
+                .customizeDescription('Search param ' +
+                    `contains unknown attribute: ${invalidAttribute}`) };
+    }
+    return {
+        ast,
+    };
+}
+
+module.exports = validateSearchParams;
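In practice (illustrative inputs, not from the diff): a clause such as `content-length`=5 parses cleanly and returns { ast }, whereas a clause over a made-up field, say `favorite-color`="blue", comes back as an InvalidArgument error because the field is neither an ObjectMD attribute nor prefixed with x-amz-meta-.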
@@ -1,17 +1,16 @@
 const querystring = require('querystring');
 const { errors, versioning, s3middleware } = require('arsenal');
 
 const constants = require('../../constants');
 const services = require('../services');
 const { metadataValidateBucket } = require('../metadata/metadataUtils');
 const collectCorsHeaders = require('../utilities/collectCorsHeaders');
 const escapeForXml = s3middleware.escapeForXml;
 const { pushMetric } = require('../utapi/utilities');
+const validateSearchParams = require('../api/apiUtils/bucket/validateSearch');
+const parseWhere = require('../api/apiUtils/bucket/parseWhere');
 const versionIdUtils = versioning.VersionID;
 
-// Sample XML response for GET bucket objects:
-/*
+/* Sample XML response for GET bucket objects:
 <ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
     <Name>example-bucket</Name>
     <Prefix></Prefix>
@@ -41,7 +40,6 @@ const versionIdUtils = versioning.VersionID;
 // http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGETVersion.html#RESTBucketGET_Examples
 /*
 <?xml version="1.0" encoding="UTF-8"?>
-
 <ListVersionsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01">
     <Name>bucket</Name>
     <Prefix>my</Prefix>
@@ -203,6 +201,22 @@ function processMasterVersions(bucketName, listParams, list) {
     return xml.join('');
 }
 
+function handleResult(listParams, requestMaxKeys, encoding, authInfo,
+    bucketName, list, corsHeaders, log, callback) {
+    // eslint-disable-next-line no-param-reassign
+    listParams.maxKeys = requestMaxKeys;
+    // eslint-disable-next-line no-param-reassign
+    listParams.encoding = encoding;
+    let res;
+    if (listParams.listingType === 'DelimiterVersions') {
+        res = processVersions(bucketName, listParams, list);
+    } else {
+        res = processMasterVersions(bucketName, listParams, list);
+    }
+    pushMetric('listBucket', log, { authInfo, bucket: bucketName });
+    return callback(null, res, corsHeaders);
+}
+
 /**
  * bucketGet - Return list of objects in bucket
  * @param {AuthInfo} authInfo - Instance of AuthInfo class with
@@ -227,6 +241,14 @@ function bucketGet(authInfo, request, log, callback) {
     if (Number.isNaN(requestMaxKeys) || requestMaxKeys < 0) {
         return callback(errors.InvalidArgument);
     }
+    let validatedAst;
+    if (params.search !== undefined) {
+        const astOrError = validateSearchParams(params.search);
+        if (astOrError.error) {
+            return callback(astOrError.error);
+        }
+        validatedAst = astOrError.ast;
+    }
     // AWS only returns 1000 keys even if max keys are greater.
     // Max keys stated in response xml can be greater than actual
     // keys returned.
@@ -259,22 +281,18 @@ function bucketGet(authInfo, request, log, callback) {
         listParams.versionIdMarker = params['version-id-marker'] ?
             versionIdUtils.decode(params['version-id-marker']) : undefined;
     }
+    if (params.search !== undefined) {
+        log.info('performing search listing', { search: params.search });
+        listParams.mongifiedSearch = parseWhere(validatedAst);
+    }
     return services.getObjectListing(bucketName, listParams, log,
         (err, list) => {
            if (err) {
                log.debug('error processing request', { error: err });
                return callback(err, null, corsHeaders);
            }
-           listParams.maxKeys = requestMaxKeys;
-           listParams.encoding = encoding;
-           let res;
-           if (listParams.listingType === 'DelimiterVersions') {
-               res = processVersions(bucketName, listParams, list);
-           } else {
-               res = processMasterVersions(bucketName, listParams, list);
-           }
-           pushMetric('listBucket', log, { authInfo, bucket: bucketName });
-           return callback(null, res, corsHeaders);
+           return handleResult(listParams, requestMaxKeys, encoding, authInfo,
+               bucketName, list, corsHeaders, log, callback);
        });
    });
    return undefined;
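End to end, a search listing is an ordinary bucket GET carrying a search query parameter; the request below is illustrative, with the clause percent-encoded the same way bin/search_bucket does it:

    GET /bucketname/?search=%60x-amz-meta-color%60%3D%22blue%22 HTTP/1.1
    Host: 127.0.0.1:8000

bucketGet validates the clause, stores the translated tree on listParams.mongifiedSearch, and the listing backend applies it when querying metadata.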
@@ -117,7 +117,11 @@ function bucketPut(authInfo, request, log, callback) {
     }
     const { bucketName } = request;
 
-    if (request.bucketName === 'METADATA') {
+    if (request.bucketName === 'METADATA'
+    // Note: for this to work with Vault, would need way to set
+    // canonical ID to http://acs.zenko.io/accounts/service/clueso
+        && !authInfo.isRequesterThisServiceAccount('clueso')) {
         return callback(errors.AccessDenied
             .customizeDescription('The bucket METADATA is used ' +
                 'for internal purposes'));
@@ -85,6 +85,7 @@ function objectPut(authInfo, request, streamingV4Params, log, callback) {
             request, false, streamingV4Params, log, next);
         },
     ], (err, storingResult) => {
+        log.info('objectPut waterfall done');
         if (err) {
             return callback(err, responseHeaders);
         }
@@ -118,6 +119,7 @@ function objectPut(authInfo, request, streamingV4Params, log, callback) {
             newByteLength,
             oldByteLength: isVersionedObj ? null : oldByteLength,
         });
+        log.info('objectPut pushMetric done');
         return callback(null, responseHeaders);
     });
 });
@@ -1,17 +1,27 @@
+const serviceAccountPrefix =
+    require('arsenal').constants.zenkoServiceAccount;
+
 /** build simple authdata with only one account
  * @param {string} accessKey - account's accessKey
  * @param {string} secretKey - account's secretKey
+ * @param {string} [serviceName] - service name to use to generate can id
  * @return {object} authdata - authdata with account's accessKey and secretKey
  */
-function buildAuthDataAccount(accessKey, secretKey) {
+function buildAuthDataAccount(accessKey, secretKey, serviceName) {
+    // TODO: remove specific check for clueso and generate unique
+    // canonical id's for accounts
+    const canonicalID = serviceName && serviceName === 'clueso' ?
+        `${serviceAccountPrefix}/${serviceName}` : '12349df900b949e' +
+        '55d96a1e698fbacedfd6e09d98eacf8f8d52' +
+        '18e7cd47qwer';
+    const shortid = '123456789012';
     return {
         accounts: [{
             name: 'CustomAccount',
             email: 'customaccount1@setbyenv.com',
-            arn: 'arn:aws:iam::123456789012:root',
-            canonicalID: '12349df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d52' +
-                '18e7cd47qwer',
-            shortid: '123456789012',
+            arn: `arn:aws:iam::${shortid}:root`,
+            canonicalID,
+            shortid,
             keys: [{
                 access: accessKey,
                 secret: secretKey,
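For illustration (key values hypothetical), the new third argument only changes the canonical ID that ends up in the account entry:

    // service account: canonical ID becomes `${serviceAccountPrefix}/clueso`,
    // matching the Clueso entry added to the authdata file above
    buildAuthDataAccount('cluesoKey1', 'cluesoSecretKey1', 'clueso');

    // no service name: falls back to the fixed '12349df9...qwer' canonical ID
    buildAuthDataAccount('accessKey1', 'verySecretKey1');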
@@ -149,14 +149,30 @@ class AzureClient {
                     .customizeDescription('Error returned from ' +
                     `Azure: ${err.message}`));
                 }
-                return callback(null, azureKey);
+                // XXX RACE CONDITION?
+                return this._errorWrapper('put', 'createBlobSnapshot',
+                    [this._azureContainerName, azureKey, options,
+                        (err, snapshotName) => {
+                            if (err) {
+                                logHelper(log, 'error', 'err from Azure PUT ' +
+                                    'data backend', err, this._dataStoreName);
+                                return callback(errors.ServiceUnavailable
+                                    .customizeDescription('Error returned ' +
+                                    `from Azure: ${err.message}`));
+                            }
+                            process.stdout.write('GOT AZURE VERSION ID' +
+                                `${snapshotName}\n`);
+                            return callback(null, azureKey, snapshotName);
+                        }], log, callback);
             }], log, callback);
         });
     }
 
     head(objectGetInfo, reqUids, callback) {
         const log = createLogger(reqUids);
-        const { key, azureStreamingOptions } = objectGetInfo;
+        const { key, azureStreamingOptions, dataStoreVersionId } =
+            objectGetInfo;
+        azureStreamingOptions.snapshotId = dataStoreVersionId;
         return this._errorWrapper('head', 'getBlobProperties',
             [this._azureContainerName, key, azureStreamingOptions,
                 err => {
@@ -182,7 +198,8 @@ class AzureClient {
     get(objectGetInfo, range, reqUids, callback) {
         const log = createLogger(reqUids);
         // for backwards compatibility
-        const { key, response, azureStreamingOptions } = objectGetInfo;
+        const { key, response, azureStreamingOptions,
+            dataStoreVersionId } = objectGetInfo;
         let streamingOptions;
         if (azureStreamingOptions) {
             // option coming from api.get()
@@ -193,6 +210,8 @@ class AzureClient {
             const rangeEnd = range[1] ? range[1].toString() : undefined;
             streamingOptions = { rangeStart, rangeEnd };
         }
+        process.stdout.write('AZURE VERSION GET', dataStoreVersionId);
+        streamingOptions.snapshotId = dataStoreVersionId;
         this._errorWrapper('get', 'getBlobToStream',
             [this._azureContainerName, key, response, streamingOptions,
                 err => {
@@ -6,6 +6,7 @@ const async = require('async');
 
 const { config } = require('../Config');
 const parseLC = require('./locationConstraintParser');
+const DataFileBackend = require('./file/backend');
 
 const { checkExternalBackend } = require('./external/utils');
 const { externalBackendHealthCheckInterval } = require('../../constants');
@@ -291,6 +292,10 @@ const multipleBackendGateway = {
         }
         return cb();
     },
+
+    getDiskUsage: (reqUids, callback) => {
+        new DataFileBackend().getDiskUsage(reqUids, callback);
+    },
 };
 
 module.exports = multipleBackendGateway;
@@ -0,0 +1,456 @@
+const arsenal = require('arsenal');
+const async = require('async');
+const forge = require('node-forge');
+const request = require('request');
+
+const { buildAuthDataAccount } = require('./auth/in_memory/builder');
+const metadata = require('./metadata/wrapper');
+const _config = require('./Config').config;
+const logger = require('./utilities/logger');
+
+const managementEndpointRoot =
+    process.env.MANAGEMENT_ENDPOINT ||
+    'https://api.zenko.io';
+
+const managementEndpoint = `${managementEndpointRoot}/api/v1/instance`;
+
+const managementDatabaseName = 'PENSIEVE';
+const initRemoteManagementRetryDelay = 10000;
+const pushReportDelay = 30000;
+const pullConfigurationOverlayDelay = 10000;
+
+const tokenRotationDelay = 3600 * 24 * 7 * 1000; // 7 days
+const tokenConfigurationKey = 'auth/zenko/remote-management-token';
+const latestOverlayVersionKey = 'configuration/overlay-version';
+const replicatorEndpoint = 'zenko-cloudserver-replicator';
+
+function decryptSecret(instanceCredentials, secret) {
+    // XXX don't forget to use u.encryptionKeyVersion if present
+    const privateKey = forge.pki.privateKeyFromPem(
+        instanceCredentials.privateKey);
+    const encryptedSecretKey = forge.util.decode64(secret);
+    return privateKey.decrypt(encryptedSecretKey, 'RSA-OAEP', {
+        md: forge.md.sha256.create(),
+    });
+}
+
+function getStoredCredentials(instanceId, log, callback) {
+    metadata.getObjectMD(managementDatabaseName, tokenConfigurationKey, {},
+        log, callback);
+}
+
+function patchConfiguration(instanceId, newConf, log, cb) {
+    if (newConf.version === undefined) {
+        log.debug('no remote configuration created yet');
+        return process.nextTick(cb, null, newConf);
+    }
+
+    if (_config.overlayVersion !== undefined &&
+        newConf.version <= _config.overlayVersion) {
+        log.debug('configuration version already applied',
+            { configurationVersion: newConf.version });
+        return process.nextTick(cb, null, newConf);
+    }
+
+    return getStoredCredentials(instanceId, log, (err, creds) => {
+        if (err) {
+            return cb(err);
+        }
+
+        const accounts = [];
+        if (newConf.users) {
+            newConf.users.forEach(u => {
+                if (u.secretKey && u.secretKey.length > 0) {
+                    const secretKey = decryptSecret(creds, u.secretKey);
+                    // accountType will be service-replication or service-clueso
+                    let serviceName;
+                    if (u.accountType && u.accountType.startsWith('service-')) {
+                        serviceName = u.accountType.split('-')[1];
+                    }
+                    const newAccount = buildAuthDataAccount(
+                        u.accessKey, secretKey, serviceName);
+                    accounts.push(newAccount.accounts[0]);
+                }
+            });
+        }
+
+        const restEndpoints = Object.assign({}, _config.restEndpoints);
+        if (newConf.endpoints) {
+            newConf.endpoints.forEach(e => {
+                restEndpoints[e.hostname] = e.locationName;
+            });
+        }
+
+        if (!restEndpoints[replicatorEndpoint]) {
+            restEndpoints[replicatorEndpoint] = 'us-east-1';
+        }
+
+        const locations = {};
+        if (newConf.locations) {
+            // Object.values() is apparently too recent
+            Object.keys(newConf.locations || {}).forEach(k => {
+                const l = newConf.locations[k];
+                const location = {};
+                switch (l.locationType) {
+                case 'location-mem-v1':
+                    location.type = 'mem';
+                    break;
+                case 'location-file-v1':
+                    location.type = 'file';
+                    break;
+                case 'location-azure-v1':
+                    location.type = 'azure';
+                    if (l.details.secretKey && l.details.secretKey.length > 0) {
+                        location.details = {
+                            bucketMatch: l.details.bucketMatch,
+                            azureStorageEndpoint: l.details.endpoint,
+                            azureStorageAccountName: l.details.accessKey,
+                            azureStorageAccessKey: decryptSecret(creds,
+                                l.details.secretKey),
+                            azureContainerName: l.details.bucketName,
+                        };
+                    }
+                    break;
+                case 'location-do-spaces-v1':
+                case 'location-aws-s3-v1':
+                case 'location-wasabi-v1':
+                    location.type = 'aws_s3';
+                    if (l.details.secretKey && l.details.secretKey.length > 0) {
+                        location.details = {
+                            credentials: {
+                                accessKey: l.details.accessKey,
+                                secretKey: decryptSecret(creds,
+                                    l.details.secretKey),
+                            },
+                            bucketName: l.details.bucketName,
+                            bucketMatch: l.details.bucketMatch,
+                            serverSideEncryption:
+                                Boolean(l.details.serverSideEncryption),
+                            awsEndpoint: l.details.endpoint ||
+                                's3.amazonaws.com',
+                        };
+                    }
+                    break;
+                default:
+                    log.info('unknown location type', { locationType:
+                        l.locationType });
+                    return;
+                }
+                location.legacyAwsBehavior = Boolean(l.legacyAwsBehavior);
+                locations[l.name] = location;
+            });
+            try {
+                _config.setLocationConstraints(locations);
+            } catch (error) {
+                log.info('could not apply configuration version location ' +
+                    'constraints', { error });
+                return cb(error);
+            }
+        }
+
+        _config.setAuthDataAccounts(accounts);
+        _config.setRestEndpoints(restEndpoints);
+        _config.overlayVersion = newConf.version;
+
+        log.info('applied configuration version',
+            { configurationVersion: _config.overlayVersion });
+
+        return cb(null, newConf);
+    });
+}
+
+function loadRemoteOverlay(instanceId, remoteToken, cachedOverlay, log, cb) {
+    log.debug('loading remote overlay');
+    const opts = {
+        headers: {
+            'x-instance-authentication-token': remoteToken,
+            'x-scal-request-id': log.getSerializedUids(),
+        },
+    };
+    request(`${managementEndpoint}/${instanceId}/config/overlay`, opts,
+        (error, response, body) => {
+            if (error) {
+                return cb(error);
+            }
+            if (response.statusCode === 200) {
+                return cb(null, cachedOverlay, body);
+            }
+            if (response.statusCode === 404) {
+                return cb(null, cachedOverlay, {});
+            }
+            return cb(arsenal.errors.AccessForbidden, cachedOverlay, {});
+        }).json();
+}
+
+function loadCachedOverlay(log, callback) {
+    return metadata.getObjectMD(managementDatabaseName,
+        latestOverlayVersionKey, {}, log, (err, version) => {
+            if (err) {
+                if (err.NoSuchKey) {
+                    return process.nextTick(callback, null, {});
+                }
+                return callback(err);
+            }
+            return metadata.getObjectMD(managementDatabaseName,
+                `configuration/overlay/${version}`, {}, log, (err, conf) => {
+                    if (err) {
+                        if (err.NoSuchKey) {
+                            return process.nextTick(callback, null, {});
+                        }
+                        return callback(err);
+                    }
+                    return callback(null, conf);
+                });
+        });
+}
+
+function overlayHasVersion(overlay) {
+    return overlay && overlay.version !== undefined;
+}
+
+function remoteOverlayIsNewer(cachedOverlay, remoteOverlay) {
+    return (overlayHasVersion(remoteOverlay) &&
+            (!overlayHasVersion(cachedOverlay) ||
+             remoteOverlay.version > cachedOverlay.version));
+}
+
+function saveConfigurationVersion(cachedOverlay, remoteOverlay, log, cb) {
+    if (remoteOverlayIsNewer(cachedOverlay, remoteOverlay)) {
+        const objName = `configuration/overlay/${remoteOverlay.version}`;
+        metadata.putObjectMD(managementDatabaseName, objName, remoteOverlay,
+            {}, log, error => {
+                if (error) {
+                    log.error('could not save configuration version',
+                        { configurationVersion: remoteOverlay.version });
+                }
+                metadata.putObjectMD(managementDatabaseName,
+                    latestOverlayVersionKey, remoteOverlay.version, {}, log,
+                    error => cb(error, remoteOverlay));
+            });
+    } else {
+        log.debug('no remote configuration to cache yet');
+        process.nextTick(cb, null, remoteOverlay);
+    }
+}
+
+// TODO save only after successful patch
+function applyConfigurationOverlay(instanceId, remoteToken, log, cb) {
+    async.waterfall([
+        wcb => loadCachedOverlay(log, wcb),
+        (cachedOverlay, wcb) => patchConfiguration(instanceId, cachedOverlay,
+            log, wcb),
+        (cachedOverlay, wcb) =>
+            loadRemoteOverlay(instanceId, remoteToken, cachedOverlay, log, wcb),
+        (cachedOverlay, remoteOverlay, wcb) =>
+            saveConfigurationVersion(cachedOverlay, remoteOverlay, log, wcb),
+        (remoteOverlay, wcb) => patchConfiguration(instanceId, remoteOverlay,
+            log, wcb),
+    ], error => {
+        if (error) {
+            log.error('could not apply managed configuration', { error });
+        }
+        if (cb) {
+            cb(null, instanceId, remoteToken);
+        }
+        setTimeout(applyConfigurationOverlay, pullConfigurationOverlayDelay,
+            instanceId, remoteToken, logger.newRequestLogger());
+    });
+}
+
+function getStats() {
+    const fromURL = `http://localhost:${_config.port}/_/report`;
+    const fromOptions = {
+        headers: {
+            'x-scal-report-token': process.env.REPORT_TOKEN,
+        },
+    };
+    return request(fromURL, fromOptions).json();
+}
+
+function postStats(instanceId, remoteToken, next) {
+    const toURL = `${managementEndpoint}/${instanceId}/stats`;
+    const toOptions = {
+        headers: {
+            'x-instance-authentication-token': remoteToken,
+        },
+    };
+    const toCallback = (err, response, body) => {
+        if (err) {
+            process.stdout.write(`STAT PUSH ERR ${err}\n`);
+        }
+        if (response && response.statusCode !== 201) {
+            process.stdout.write(`STAT PUSH ERR ${response.statusCode} ` +
+                `${body}\n`);
+        }
+        if (next) {
+            next(null, instanceId, remoteToken);
+        }
+    };
+    return request.post(toURL, toOptions, toCallback).json();
+}
+
+function pushStats(instanceId, remoteToken, next) {
+    getStats().pipe(postStats(instanceId, remoteToken, next));
+    const fromURL = `http://localhost:${_config.port}/_/report`;
+    const fromOptions = {
+        url: fromURL,
+        headers: {
+            'x-scal-report-token': process.env.REPORT_TOKEN,
+        },
+    };
+    request(fromOptions, (error, response, body) => {
+        if (!error && response.statusCode == 200) {
+            var info = JSON.parse(body);
+            console.log('systemStats', info.systemStats);
+        }
+    })
+    setTimeout(pushStats, pushReportDelay, instanceId, remoteToken);
+}
+
+function issueCredentials(instanceId, log, callback) {
+    log.info('registering with API to get token');
+
+    const keyPair = forge.pki.rsa.generateKeyPair({ bits: 2048, e: 0x10001 });
+    const privateKey = forge.pki.privateKeyToPem(keyPair.privateKey);
+    const publicKey = forge.pki.publicKeyToPem(keyPair.publicKey);
+
+    const postData = {
+        publicKey,
+    };
+
+    request.post(`${managementEndpoint}/${instanceId}/register`,
+        (error, response, body) => {
+            if (error) {
+                return callback(error);
+            }
+            if (response.statusCode !== 201) {
+                log.error('could not register instance', {
+                    statusCode: response.statusCode,
+                });
+                return callback(arsenal.errors.InternalError);
+            }
+            /* eslint-disable no-param-reassign */
+            body.privateKey = privateKey;
+            /* eslint-enable no-param-reassign */
+            return callback(null, body);
+        }).json(postData);
+}
+
+function confirmInstanceCredentials(instanceId, creds, log, callback) {
+    const opts = {
+        headers: {
+            'x-instance-authentication-token': creds.token,
+        },
+    };
+    const postData = {
+        serial: creds.serial || 0,
+        publicKey: creds.publicKey,
+    };
+    request.post(`${managementEndpoint}/${instanceId}/confirm`,
+        opts, (error, response) => {
+            if (error) {
+                return callback(error);
+            }
+            if (response.statusCode === 200) {
+                return callback(null, instanceId, creds.token);
+            }
+            return callback(arsenal.errors.InternalError);
+        }).json(postData);
+}
+
+function initManagementCredentials(instanceId, log, callback) {
+    getStoredCredentials(instanceId, log, (error, value) => {
+        if (error) {
+            if (error.NoSuchKey) {
+                return issueCredentials(instanceId, log, (error, value) => {
+                    if (error) {
+                        log.error('could not issue token', { error });
+                        return callback(error);
+                    }
+                    log.debug('saving token');
+                    return metadata.putObjectMD(managementDatabaseName,
+                        tokenConfigurationKey, value, {}, log, error => {
+                            if (error) {
+                                log.error('could not save token',
+                                    { error });
+                                return callback(error);
+                            }
+                            log.info('saved token locally, ' +
+                                'confirming instance');
+                            return confirmInstanceCredentials(
+                                instanceId, value, log, callback);
+                        });
+                });
+            }
+            log.debug('could not get token', { error });
+            return callback(error);
+        }
+
+        log.info('returning existing token');
+        if (Date.now() - value.issueDate > tokenRotationDelay) {
+            log.warn('management API token is too old, should re-issue');
+        }
+
+        return callback(null, instanceId, value.token);
+    });
+}
+
+function initManagementDatabase(log, callback) {
+    // XXX choose proper owner names
+    const md = new arsenal.models.BucketInfo(managementDatabaseName, 'owner',
+        'owner display name', new Date().toJSON());
+
+    metadata.createBucket(managementDatabaseName, md, log, error => {
+        if (error) {
+            if (error.BucketAlreadyExists) {
+                log.info('created management database');
+                return callback();
+            }
+            log.error('could not initialize management database',
+                { error });
+            return callback(error);
+        }
+        log.info('initialized management database');
+        return callback();
+    });
+}
+
+function initManagement(log) {
+    if ((process.env.REMOTE_MANAGEMENT_DISABLE &&
+        process.env.REMOTE_MANAGEMENT_DISABLE !== '0')
+        || process.env.S3BACKEND === 'mem') {
+        log.info('remote management disabled');
+        return;
+    }
+    async.waterfall([
+        cb => initManagementDatabase(log, cb),
+        cb => metadata.getUUID(log, cb),
+        (instanceId, cb) => initManagementCredentials(instanceId, log, cb),
+        (instanceId, token, cb) =>
+            applyConfigurationOverlay(instanceId, token, log, cb),
+        (instanceId, token, cb) => pushStats(instanceId, token, cb),
+        (instanceId, token, cb) => {
+            metadata.notifyBucketChange(() => {
+                pushStats(instanceId, token);
+            });
+            process.nextTick(cb);
+        },
+    ], error => {
+        if (error) {
+            log.error('could not initialize remote management, retrying later',
+                { error });
+            setTimeout(initManagement,
+                initRemoteManagementRetryDelay,
+                logger.newRequestLogger());
+        } else {
+            metadata.getUUID(log, (err, instanceId) => {
+                log.info(`this deployment's Instance ID is ${instanceId}`);
+                log.end('management init done');
+            });
+        }
+    });
+}
+
+module.exports = {
+    initManagement,
+};
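In outline, initManagement runs a waterfall: create the PENSIEVE management database, read the instance UUID, issue or load credentials against the management endpoint, then keep two timers alive: applyConfigurationOverlay re-polls every pullConfigurationOverlayDelay (10 s) and pushStats reports every pushReportDelay (30 s). A plausible caller, given the import added to the server file below (the diff truncates before showing the actual call site), would be:

    const { initManagement } = require('./management');
    initManagement(logger.newRequestLogger());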
@@ -1,5 +1,6 @@
 const cluster = require('cluster');
 const arsenal = require('arsenal');
+const async = require('async');
 
 const logger = require('../../utilities/logger');
 const BucketInfo = arsenal.models.BucketInfo;
@@ -7,11 +8,13 @@ const constants = require('../../../constants');
 const { config } = require('../../Config');
 
 const errors = arsenal.errors;
-const MetadataFileClient = arsenal.storage.metadata.MetadataFileClient;
+const MetadataFileClient = arsenal.storage.metadata.file.MetadataFileClient;
 const versionSep = arsenal.versioning.VersioningConstants.VersionId.Separator;
 
 const METASTORE = '__metastore';
 
+const itemScanRefreshDelay = 1000 * 30 * 60; // 30 minutes
+
 class BucketFileInterface {
 
     /**
@@ -27,16 +30,20 @@ class BucketFileInterface {
         if (params && params.noDbOpen) {
             return;
         }
-        this.mdDB = this.mdClient.openDB(err => {
+        this.mdClient.openDB((err, dbClient) => {
             if (err) {
                 throw err;
             }
+            this.mdDB = dbClient;
             // the metastore sublevel is used to store bucket attributes
             this.metastore = this.mdDB.openSub(METASTORE);
             if (cluster.isMaster) {
                 this.setupMetadataServer();
             }
         });
+
+        this.lastItemScanTime = null;
+        this.lastItemScanResult = null;
     }
 
     setupMetadataServer() {
@@ -89,6 +96,7 @@ class BucketFileInterface {
         if (err === undefined) {
             return cb(errors.BucketAlreadyExists);
         }
+        this.lastItemScanTime = null;
         this.putBucketAttributes(bucketName,
             bucketMD,
             log, cb);
@@ -180,6 +188,7 @@ class BucketFileInterface {
                 logObj);
             return cb(errors.InternalError);
         }
+        this.lastItemScanTime = null;
         return cb();
     });
     return undefined;
@@ -324,6 +333,11 @@ class BucketFileInterface {
     }
 
     countItems(log, cb) {
+        if (this.lastItemScanTime !== null &&
+            (Date.now() - this.lastItemScanTime) <= itemScanRefreshDelay) {
+            return process.nextTick(cb, null, this.lastItemScanResult);
+        }
+
         const params = {};
         const extension = new arsenal.algorithms.list.Basic(params, log);
         const requestParams = extension.genMDParams();
@@ -332,6 +346,7 @@ class BucketFileInterface {
             objects: 0,
             versions: 0,
             buckets: 0,
+            bucketList: [],
         };
         let cbDone = false;
 
@@ -344,9 +359,12 @@ class BucketFileInterface {
             if (!e.includes(METASTORE)) {
                 if (e.includes(constants.usersBucket)) {
                     res.buckets++;
+                    res.bucketList.push({
+                        name: e.split(constants.splitter)[1],
+                    });
                 } else if (e.includes(versionSep)) {
                     res.versions++;
-                } else {
+                } else if (!e.includes('..recordLogs#s3-recordlog')) {
                     res.objects++;
                 }
             }
@@ -366,7 +384,25 @@ class BucketFileInterface {
         .on('end', () => {
             if (!cbDone) {
                 cbDone = true;
-                return cb(null, res);
+                async.eachSeries(res.bucketList, (bucket, cb) => {
+                    this.getBucketAttributes(bucket.name, log,
+                        (err, bucketInfo) => {
+                            if (err) {
+                                return cb(err);
+                            }
+                            /* eslint-disable no-param-reassign */
+                            bucket.location =
+                                bucketInfo.getLocationConstraint();
+                            /* eslint-enable no-param-reassign */
+                            return cb();
+                        });
+                }, err => {
+                    if (!err) {
+                        this.lastItemScanTime = Date.now();
+                        this.lastItemScanResult = res;
+                    }
+                    return cb(err, res);
+                });
             }
             return undefined;
         });
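With the cache and the per-bucket location lookup in place, a countItems result would look something like this (illustrative values):

    { objects: 10, versions: 2, buckets: 1,
      bucketList: [{ name: 'bucketname', location: 'us-east-1' }] }

and the whole scan is memoized in lastItemScanResult until itemScanRefreshDelay (30 minutes) elapses.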
@@ -18,6 +18,7 @@ try {
 
 let client;
 let implName;
+let bucketNotificationHook;
 
 if (config.backends.metadata === 'mem') {
     client = inMemory;

@@ -30,8 +31,7 @@ if (config.backends.metadata === 'mem') {
     implName = 'bucketclient';
 } else if (config.backends.metadata === 'mongodb') {
     client = new MongoClientInterface({
-        host: config.mongodb.host,
-        port: config.mongodb.port,
+        hosts: config.mongodb.hosts,
         writeConcern: config.mongodb.writeConcern,
         replicaSet: config.mongodb.replicaSet,
         readPreference: config.mongodb.readPreference,
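
Switching from host/port to hosts lets the MongoClientInterface point at a replica set rather than a single server. A hedged sketch of the corresponding config section; the exact format of hosts is an assumption, only the key names appear in the diff:

    mongodb: {
        hosts: 'mongo-1:27017,mongo-2:27017,mongo-3:27017', // assumed format
        writeConcern: 'majority',
        replicaSet: 'rs0',
        readPreference: 'primary',
    },
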
@@ -64,6 +64,9 @@ const metadata = {
             return cb(err);
         }
         log.trace('bucket created in metadata');
+        if (bucketNotificationHook) {
+            setTimeout(bucketNotificationHook);
+        }
         return cb(err);
     });
 },

@@ -100,6 +103,9 @@ const metadata = {
             return cb(err);
         }
         log.debug('Deleted bucket from Metadata');
+        if (bucketNotificationHook) {
+            setTimeout(bucketNotificationHook);
+        }
         return cb(err);
     });
 },

@@ -256,6 +262,10 @@ const metadata = {
         }
         return client.countItems(log, cb);
     },

+    notifyBucketChange: cb => {
+        bucketNotificationHook = cb;
+    },
 };

 module.exports = metadata;
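
The wrapper now exposes a single registration point for bucket-change notifications, and the hook is scheduled with setTimeout after bucket creation and deletion so it runs on a later tick. A usage sketch with a hypothetical subscriber:

    // Hypothetical subscriber: refresh derived state whenever the set
    // of buckets changes. Note that notifyBucketChange stores a single
    // callback, so a later registration replaces an earlier one.
    const metadata = require('./lib/metadata/wrapper');

    metadata.notifyBucketChange(() => {
        console.log('bucket set changed, refreshing derived state');
    });
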
@@ -235,6 +235,7 @@ function putData(request, response, bucketInfo, objMd, log, callback) {
         owner: canonicalID,
         namespace: NAMESPACE,
         objectKey: request.objectKey,
+        metaHeaders: {},
     };
     const payloadLen = parseInt(request.headers['content-length'], 10);
     const backendInfoObj = locationConstraintCheck(

@@ -11,6 +11,7 @@ const _config = require('./Config').config;
 const { blacklistedPrefixes } = require('../constants');
 const api = require('./api/api');
 const data = require('./data/wrapper');
+const { initManagement } = require('./management');

 const routes = arsenal.s3routes.routes;
 const websiteEndpoints = _config.websiteEndpoints;

@@ -36,6 +37,7 @@ const STATS_INTERVAL = 5; // 5 seconds
 const STATS_EXPIRY = 30; // 30 seconds
 const statsClient = new StatsClient(localCacheClient, STATS_INTERVAL,
     STATS_EXPIRY);
+const enableRemoteManagement = true;

 class S3Server {
     /**

@@ -69,6 +71,13 @@ class S3Server {
     routeRequest(req, res) {
         // disable nagle algorithm
         req.socket.setNoDelay();
+
+        // use proxied hostname if needed
+        if (req.headers['x-target-host']) {
+            // eslint-disable-next-line no-param-reassign
+            req.headers.host = req.headers['x-target-host'];
+        }
+
         const params = {
             api,
             internalHandlers,
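
With this hunk, a proxy sitting in front of the server can address a specific virtual host by setting x-target-host, which the server copies into the Host header before routing. An illustrative client-side request (the proxy address and bucket hostname are made up):

    const http = require('http');

    // The server will route as if the request had been addressed to
    // 'my-bucket.s3.example.com'.
    const req = http.request({
        host: 'proxy.internal',
        port: 8000,
        path: '/',
        headers: { 'x-target-host': 'my-bucket.s3.example.com' },
    }, res => res.resume());
    req.end();
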

@@ -131,6 +140,14 @@ class S3Server {
         } else {
             this.server.listen(port);
         }
+
+        // TODO this should wait for metadata healthcheck to be ok
+        // TODO only do this in cluster master
+        setTimeout(() => {
+            if (enableRemoteManagement) {
+                initManagement(logger.newRequestLogger());
+            }
+        }, 5000);
     }

     /*
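
The fixed five-second timer is a stopgap, as the TODOs note: initialization should really wait for the metadata healthcheck. One possible shape for that, sketched under the assumption of a callback-style metadata.checkHealth (which is not part of this diff):

    // Sketch: poll until metadata reports healthy, then initialize
    // remote management. metadata.checkHealth is assumed, not shown here.
    function initManagementWhenHealthy(retryDelayMs) {
        metadata.checkHealth(logger.newRequestLogger(), err => {
            if (err) {
                setTimeout(() => initManagementWhenHealthy(retryDelayMs),
                    retryDelayMs);
                return;
            }
            initManagement(logger.newRequestLogger());
        });
    }
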
@@ -1,8 +1,8 @@
 const fs = require('fs');
 const os = require('os');

-const { errors, ipCheck } = require('arsenal');
 const async = require('async');
+const request = require('request');

 const config = require('../Config').config;
 const data = require('../data/wrapper');

@@ -11,10 +11,9 @@ const metadata = require('../metadata/wrapper');
 const REPORT_MODEL_VERSION = 1;

 function cleanup(obj) {
-    const ret = JSON.parse(JSON.stringify(obj));
-    delete ret.authData;
-    delete ret.reportToken;
-    return ret;
+    return {
+        overlayVersion: obj.overlayVersion,
+    };
 }

 function isAuthorized(clientIP, req) {
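
cleanup moves from a blacklist (deep-clone the config, delete the known secrets) to a whitelist (emit only the fields explicitly allowed). The whitelist fails safe: a sensitive field added to the config later stays out of the report unless it is deliberately opted in. For example, with the new version:

    // Everything except the whitelisted field is dropped
    // (values are illustrative).
    cleanup({
        overlayVersion: 7,
        reportToken: 'secret-token',
        mongodb: { hosts: 'localhost:27017' },
    });
    // => { overlayVersion: 7 }
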

@@ -22,26 +21,6 @@ function isAuthorized(clientIP, req) {
         req.headers['x-scal-report-token'] === config.reportToken;
 }

-const itemScanRefreshDelay = 1000 * 30 * 60; // 30 minutes
-let lastItemScanTime;
-let lastItemScanResult;
-
-function countItems(log, cb) {
-    // The value is cached, as scanning all the dataset can be I/O intensive
-    if (lastItemScanTime === undefined ||
-        (Date.now() - lastItemScanTime) > itemScanRefreshDelay) {
-        return metadata.countItems(log, (err, res) => {
-            if (err) {
-                return cb(err);
-            }
-            lastItemScanTime = Date.now();
-            lastItemScanResult = res;
-            return cb(null, res);
-        });
-    }
-    return process.nextTick(cb, null, lastItemScanResult);
-}
-
 function getGitVersion(cb) {
     fs.readFile('.git/HEAD', 'ascii', (err, val) => {
         if (err && err.code === 'ENOENT') {

@@ -94,6 +73,52 @@ function getSystemStats() {
     };
 }

+function getCRRStats(log, cb) {
+    log.debug('getting CRR stats', { method: 'getCRRStats' });
+    // TODO: Reuse metrics code from Backbeat by moving it to Arsenal instead of
+    // making an HTTP request to the Backbeat metrics route.
+    const params = { url: 'http://localhost:8900/_/metrics/crr/all' };
+    return request.get(params, (err, res) => {
+        if (err) {
+            log.error('failed to get CRR stats', {
+                method: 'getCRRStats',
+                error: err,
+            });
+            return cb(err);
+        }
+        let body;
+        try {
+            body = JSON.parse(res.body);
+        } catch (parseErr) {
+            log.error('could not parse backbeat response', {
+                method: 'getCRRStats',
+                error: parseErr,
+            });
+            return cb(errors.InternalError
+                .customizeDescription('could not parse backbeat response'));
+        }
+        if (body.code) {
+            return cb(errors.InternalError
+                .customizeDescription('could not get metrics from backbeat'));
+        }
+        const stats = {
+            completions: {
+                count: parseFloat(body.completions.results.count),
+                size: parseFloat(body.completions.results.size),
+            },
+            backlog: {
+                count: parseFloat(body.backlog.results.count),
+                size: parseFloat(body.backlog.results.size),
+            },
+            throughput: {
+                count: parseFloat(body.throughput.results.count),
+                size: parseFloat(body.throughput.results.size),
+            },
+        };
+        return cb(null, stats);
+    });
+}
+
 /**
  * Sends back a report
  *
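
getCRRStats expects each Backbeat metric to carry a results payload with count and size. Reconstructed from the fields the parser reads, a successful response body would look roughly like this (values illustrative; they may arrive as strings, hence the parseFloat calls):

    {
        "completions": { "results": { "count": "120", "size": "1048576" } },
        "backlog": { "results": { "count": "14", "size": "65536" } },
        "throughput": { "results": { "count": "2", "size": "8192" } }
    }
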

@@ -117,7 +142,8 @@ function reportHandler(clientIP, req, res, log) {
         getMDDiskUsage: cb => metadata.getDiskUsage(log, cb),
         getDataDiskUsage: cb => data.getDiskUsage(log, cb),
         getVersion: cb => getGitVersion(cb),
-        getObjectCount: cb => countItems(log, cb),
+        getObjectCount: cb => metadata.countItems(log, cb),
+        getCRRStats: cb => getCRRStats(log, cb),
     },
     (err, results) => {
         if (err) {

@@ -135,13 +161,14 @@ function reportHandler(clientIP, req, res, log) {
             serverVersion: results.getVersion,
             systemStats: getSystemStats(),
             itemCounts: results.getObjectCount,
+            crrStats: results.getCRRStats,

             config: cleanup(config),
         };

         res.writeHead(200, { 'Content-Type': 'application/json' });
         res.write(JSON.stringify(response));
-        log.end('report handler finished');
+        log.end().debug('report handler finished');
     }
     res.end();
 });
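
After these changes the report ties the pieces together: item counts now come straight from metadata.countItems, and the new crrStats section carries the Backbeat metrics. Trimmed to the fields this diff touches, a response might look like (values illustrative):

    {
        itemCounts: { objects: 1042, versions: 87, buckets: 3,
            bucketList: [/* name and location per bucket */] },
        crrStats: {
            completions: { count: 120, size: 1048576 },
            backlog: { count: 14, size: 65536 },
            throughput: { count: 2, size: 8192 },
        },
        config: { overlayVersion: 7 },
    }
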

@@ -2,7 +2,7 @@

 const { config } = require('./lib/Config.js');
 const MetadataFileServer =
-    require('arsenal').storage.metadata.MetadataFileServer;
+    require('arsenal').storage.metadata.file.MetadataFileServer;

 if (config.backends.metadata === 'file') {
     const mdServer = new MetadataFileServer(

(File diff suppressed because it is too large.)

@@ -26,9 +26,12 @@
     "azure-storage": "^2.1.0",
     "bucketclient": "scality/bucketclient",
     "commander": "^2.9.0",
+    "node-forge": "^0.7.1",
     "node-uuid": "^1.4.3",
     "npm-run-all": "~4.0.2",
+    "request": "^2.81.0",
     "sproxydclient": "scality/sproxydclient",
+    "sql-where-parser": "~2.2.1",
     "utapi": "scality/utapi",
     "utf8": "~2.1.1",
     "uuid": "^3.0.1",

@@ -85,6 +85,15 @@ describe('Report route', () => {
         });
     });

+    it('should remove unwanted sections from config', done => {
+        queryReport(done, response => {
+            if (response.config && response.config.mongodb) {
+                return done(new Error('config contains unwanted sections'));
+            }
+            return done();
+        });
+    });
+
     it('should remove report token from config', done => {
         queryReport(done, response => {
             if (response.config && response.config.reportToken) {

@@ -0,0 +1,122 @@
+const assert = require('assert');
+const { errors } = require('arsenal');
+const validateSearch =
+    require('../../../lib/api/apiUtils/bucket/validateSearch');
+
+
+describe('validate search where clause', () => {
+    const tests = [
+        {
+            it: 'should allow a valid simple search with table attribute',
+            searchParams: '`x-amz-meta-dog`="labrador"',
+            result: undefined,
+        },
+        {
+            it: 'should allow a simple search with known ' +
+                'column attribute',
+            searchParams: '`content-length`="10"',
+            result: undefined,
+        },
+        {
+            it: 'should allow valid search with AND',
+            searchParams: '`x-amz-meta-dog`="labrador" ' +
+                'AND `x-amz-meta-age`="5"',
+            result: undefined,
+        },
+        {
+            it: 'should allow valid search with OR',
+            searchParams: '`x-amz-meta-dog`="labrador" ' +
+                'OR `x-amz-meta-age`="5"',
+            result: undefined,
+        },
+        {
+            it: 'should allow valid search with double AND',
+            searchParams: '`x-amz-meta-dog`="labrador" ' +
+                'AND `x-amz-meta-age`="5" ' +
+                'AND `x-amz-meta-whatever`="ok"',
+            result: undefined,
+        },
+        {
+            it: 'should allow valid chained search with tables and columns',
+            searchParams: '`x-amz-meta-dog`="labrador" ' +
+                'AND `x-amz-meta-age`="5" ' +
+                'AND `content-length`="10"' +
+                'OR isDeleteMarker="true"' +
+                'AND `x-amz-meta-whatever`="ok"',
+            result: undefined,
+        },
+        {
+            it: 'should allow valid LIKE search',
+            searchParams: '`x-amz-meta-dog` LIKE "lab%" ' +
+                'AND `x-amz-meta-age` LIKE "5%" ' +
+                'AND `content-length`="10"',
+            result: undefined,
+        },
+        {
+            it: 'should disallow a LIKE search with invalid attribute',
+            searchParams: '`x-zma-meta-dog` LIKE "labrador"',
+            result: errors.InvalidArgument.customizeDescription('Search ' +
+                'param contains unknown attribute: x-zma-meta-dog'),
+        },
+        {
+            it: 'should disallow a simple search with unknown attribute',
+            searchParams: '`x-zma-meta-dog`="labrador"',
+            result: errors.InvalidArgument.customizeDescription('Search ' +
+                'param contains unknown attribute: x-zma-meta-dog'),
+        },
+        {
+            it: 'should disallow a compound search with unknown ' +
+                'attribute on right',
+            searchParams: '`x-amz-meta-dog`="labrador" AND ' +
+                '`x-zma-meta-dog`="labrador"',
+            result: errors.InvalidArgument.customizeDescription('Search ' +
+                'param contains unknown attribute: x-zma-meta-dog'),
+        },
+        {
+            it: 'should disallow a compound search with unknown ' +
+                'attribute on left',
+            searchParams: '`x-zma-meta-dog`="labrador" AND ' +
+                '`x-amz-meta-dog`="labrador"',
+            result: errors.InvalidArgument.customizeDescription('Search ' +
+                'param contains unknown attribute: x-zma-meta-dog'),
+        },
+        {
+            it: 'should disallow a chained search with one invalid ' +
+                'table attribute',
+            searchParams: '`x-amz-meta-dog`="labrador" ' +
+                'AND `x-amz-meta-age`="5" ' +
+                'OR `x-zma-meta-whatever`="ok"',
+            result: errors.InvalidArgument.customizeDescription('Search ' +
+                'param contains unknown attribute: x-zma-meta-whatever'),
+        },
+        {
+            it: 'should disallow a simple search with unknown ' +
+                'column attribute',
+            searchParams: 'whatever="labrador"',
+            result: errors.InvalidArgument.customizeDescription('Search ' +
+                'param contains unknown attribute: whatever'),
+        },
+        {
+            it: 'should disallow a chained search with one invalid ' +
+                'column attribute',
+            searchParams: '`x-amz-meta-dog`="labrador" ' +
+                'AND `x-amz-meta-age`="5" ' +
+                'OR madeUp="something"' +
+                'OR `x-amz-meta-whatever`="ok"',
+            result: errors.InvalidArgument.customizeDescription('Search ' +
+                'param contains unknown attribute: madeUp'),
+        },
+    ];
+
+    tests.forEach(test => {
+        it(test.it, () => {
+            const actualResult =
+                validateSearch(test.searchParams);
+            if (test.result === undefined) {
+                assert(typeof actualResult.ast === 'object');
+            } else {
+                assert.deepStrictEqual(actualResult.error, test.result);
+            }
+        });
+    });
+});
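
These tests pin down the validateSearch contract: it returns an object carrying either a parsed ast (on success) or an Arsenal error (on an unknown attribute). A hedged sketch of how a caller in the API layer might use it; the surrounding function is illustrative:

    const validateSearch =
        require('./lib/api/apiUtils/bucket/validateSearch');

    // Validate the ?search= where clause before running a listing.
    function parseSearchClause(searchClause) {
        const { error, ast } = validateSearch(searchClause);
        if (error) {
            // e.g. InvalidArgument: unknown attribute in the clause
            return { error };
        }
        return { ast }; // parsed tree, ready for evaluation
    }
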