Compare commits
60 Commits
3c54bd740f
...
bcabdeeadb
Author | SHA1 | Date |
---|---|---|
Jonathan Gramain | bcabdeeadb | |
Jonathan Gramain | e166d1a894 | |
ironman-machine | 44800cf175 | |
Jonathan Gramain | 51a4146876 | |
Rahul Padigela | 563bbfcb8b | |
Bennett Buchanan | 4942fab225 | |
Rahul Padigela | 91a828805b | |
Rahul Padigela | eb9b60c0ef | |
alexandremerle | 66acfbbab4 | |
Electra Chong | efe8ed76ba | |
Rahul Padigela | dad0d456d3 | |
Rahul Padigela | 4601794d49 | |
Rahul Padigela | 36157fb688 | |
Bennett Buchanan | 639374522d | |
Rahul Padigela | 2499ce7277 | |
Rahul Padigela | 0bab4069cd | |
Rahul Padigela | 5949e12ffc | |
Lauren Spiegel | 7ca3c0515a | |
Rahul Padigela | 711d64d5f1 | |
Rahul Padigela | b22d12009a | |
Jonathan Gramain | 50a90d2b41 | |
Dora Korpar | a1ce222a87 | |
Jonathan Gramain | a77bf3126d | |
ironman-machine | 300769dda6 | |
Jonathan Gramain | a65c554f64 | |
philipyoo | 894d41a30b | |
Rahul Padigela | 673da3de99 | |
Jonathan Gramain | 0f535cc26a | |
ironman-machine | b1447906dd | |
mvaude | d7e4e3b7aa | |
mvaude | b445a8487b | |
Rahul Padigela | af460a0939 | |
Bennett Buchanan | 8cf3d091cb | |
ironman-machine | 1f77deab61 | |
Jonathan Gramain | 96823f0a06 | |
ironman-machine | 41a823a57e | |
Jonathan Gramain | b31bc06e63 | |
Rahul Padigela | 46d703de6b | |
ironman-machine | bb3e63ea17 | |
Electra Chong | 5d466e01b3 | |
Electra Chong | 7cbdac5f52 | |
philipyoo | 1e2d9be8f7 | |
Electra Chong | e89395c428 | |
Vianney Rancurel | 58ac3abe1a | |
ironman-machine | c22b937fe5 | |
Jonathan Gramain | 4c1fa030bf | |
Rahul Padigela | 7e2676f635 | |
ironman-machine | e5cf9b1aec | |
Alexandre Merle | d1e7f05c7d | |
ironman-machine | 4323bfaab0 | |
Alexandre Merle | 9d9d21127c | |
ironman-machine | dd9df1745c | |
Alexandre Merle | 2fcf728d38 | |
Lauren Spiegel | e9993ed64e | |
Dora Korpar | 012e281366 | |
philipyoo | 0d62d5a161 | |
Rahul Padigela | cc5dad3e83 | |
Bennett Buchanan | 62f2accc5c | |
ironman-machine | 6ad2af98cd | |
Dora Korpar | 286a599ae8 |
|
@ -1 +1,5 @@
|
||||||
|
# Logs
|
||||||
|
*.log
|
||||||
|
|
||||||
|
# Dependency directory
|
||||||
node_modules/
|
node_modules/
|
||||||
|
|
|
@ -7,6 +7,8 @@ general:
|
||||||
machine:
|
machine:
|
||||||
node:
|
node:
|
||||||
version: 6.9.5
|
version: 6.9.5
|
||||||
|
services:
|
||||||
|
- redis
|
||||||
environment:
|
environment:
|
||||||
CXX: g++-4.9
|
CXX: g++-4.9
|
||||||
|
|
||||||
|
|
18
index.js
18
index.js
|
@ -29,6 +29,7 @@ module.exports = {
|
||||||
evaluators: require('./lib/policyEvaluator/evaluator.js'),
|
evaluators: require('./lib/policyEvaluator/evaluator.js'),
|
||||||
validateUserPolicy: require('./lib/policy/policyValidator')
|
validateUserPolicy: require('./lib/policy/policyValidator')
|
||||||
.validateUserPolicy,
|
.validateUserPolicy,
|
||||||
|
evaluatePrincipal: require('./lib/policyEvaluator/principal'),
|
||||||
RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
|
RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
|
||||||
},
|
},
|
||||||
Clustering: require('./lib/Clustering'),
|
Clustering: require('./lib/Clustering'),
|
||||||
|
@ -59,12 +60,22 @@ module.exports = {
|
||||||
},
|
},
|
||||||
s3middleware: {
|
s3middleware: {
|
||||||
userMetadata: require('./lib/s3middleware/userMetadata'),
|
userMetadata: require('./lib/s3middleware/userMetadata'),
|
||||||
|
convertToXml: require('./lib/s3middleware/convertToXml'),
|
||||||
escapeForXml: require('./lib/s3middleware/escapeForXml'),
|
escapeForXml: require('./lib/s3middleware/escapeForXml'),
|
||||||
tagging: require('./lib/s3middleware/tagging'),
|
tagging: require('./lib/s3middleware/tagging'),
|
||||||
validateConditionalHeaders:
|
validateConditionalHeaders:
|
||||||
require('./lib/s3middleware/validateConditionalHeaders')
|
require('./lib/s3middleware/validateConditionalHeaders')
|
||||||
.validateConditionalHeaders,
|
.validateConditionalHeaders,
|
||||||
MD5Sum: require('./lib/s3middleware/MD5Sum'),
|
MD5Sum: require('./lib/s3middleware/MD5Sum'),
|
||||||
|
objectUtils: require('./lib/s3middleware/objectUtils'),
|
||||||
|
azureHelper: {
|
||||||
|
mpuUtils:
|
||||||
|
require('./lib/s3middleware/azureHelpers/mpuUtils'),
|
||||||
|
ResultsCollector:
|
||||||
|
require('./lib/s3middleware/azureHelpers/ResultsCollector'),
|
||||||
|
SubStreamInterface:
|
||||||
|
require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
|
||||||
|
},
|
||||||
},
|
},
|
||||||
storage: {
|
storage: {
|
||||||
metadata: {
|
metadata: {
|
||||||
|
@ -83,12 +94,17 @@ module.exports = {
|
||||||
},
|
},
|
||||||
utils: require('./lib/storage/utils'),
|
utils: require('./lib/storage/utils'),
|
||||||
},
|
},
|
||||||
|
|
||||||
models: {
|
models: {
|
||||||
BucketInfo: require('./lib/models/BucketInfo'),
|
BucketInfo: require('./lib/models/BucketInfo'),
|
||||||
ObjectMD: require('./lib/models/ObjectMD'),
|
ObjectMD: require('./lib/models/ObjectMD'),
|
||||||
|
ObjectMDLocation: require('./lib/models/ObjectMDLocation'),
|
||||||
|
ARN: require('./lib/models/ARN'),
|
||||||
WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
|
WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
|
||||||
ReplicationConfiguration:
|
ReplicationConfiguration:
|
||||||
require('./lib/models/ReplicationConfiguration'),
|
require('./lib/models/ReplicationConfiguration'),
|
||||||
},
|
},
|
||||||
|
metrics: {
|
||||||
|
StatsClient: require('./lib/metrics/StatsClient'),
|
||||||
|
RedisClient: require('./lib/metrics/RedisClient'),
|
||||||
|
},
|
||||||
};
|
};
|
||||||
|
|
|
@ -49,6 +49,14 @@ class AuthInfo {
|
||||||
isRequesterPublicUser() {
|
isRequesterPublicUser() {
|
||||||
return this.canonicalID === constants.publicId;
|
return this.canonicalID === constants.publicId;
|
||||||
}
|
}
|
||||||
|
isRequesterAServiceAccount() {
|
||||||
|
return this.canonicalID.startsWith(
|
||||||
|
`${constants.zenkoServiceAccount}/`);
|
||||||
|
}
|
||||||
|
isRequesterThisServiceAccount(serviceName) {
|
||||||
|
return this.canonicalID ===
|
||||||
|
`${constants.zenkoServiceAccount}/${serviceName}`;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = AuthInfo;
|
module.exports = AuthInfo;
|
||||||
|
|
|
@ -12,6 +12,7 @@ const convertUTCtoISO8601 = require('./v4/timeUtils').convertUTCtoISO8601;
|
||||||
const vaultUtilities = require('./in_memory/vaultUtilities');
|
const vaultUtilities = require('./in_memory/vaultUtilities');
|
||||||
const backend = require('./in_memory/Backend');
|
const backend = require('./in_memory/Backend');
|
||||||
const validateAuthConfig = require('./in_memory/validateAuthConfig');
|
const validateAuthConfig = require('./in_memory/validateAuthConfig');
|
||||||
|
const AuthLoader = require('./in_memory/AuthLoader');
|
||||||
const Vault = require('./Vault');
|
const Vault = require('./Vault');
|
||||||
|
|
||||||
let vault = null;
|
let vault = null;
|
||||||
|
@ -152,10 +153,11 @@ function doAuth(request, log, cb, awsService, requestContexts) {
|
||||||
* @param {string} accessKey - the accessKey
|
* @param {string} accessKey - the accessKey
|
||||||
* @param {string} secretKeyValue - the secretKey
|
* @param {string} secretKeyValue - the secretKey
|
||||||
* @param {string} awsService - Aws service related
|
* @param {string} awsService - Aws service related
|
||||||
|
* @param {sting} [proxyPath] - path that gets proxied by reverse proxy
|
||||||
* @return {undefined}
|
* @return {undefined}
|
||||||
*/
|
*/
|
||||||
function generateV4Headers(request, data, accessKey, secretKeyValue,
|
function generateV4Headers(request, data, accessKey, secretKeyValue,
|
||||||
awsService) {
|
awsService, proxyPath) {
|
||||||
Object.assign(request, { headers: {} });
|
Object.assign(request, { headers: {} });
|
||||||
const amzDate = convertUTCtoISO8601(Date.now());
|
const amzDate = convertUTCtoISO8601(Date.now());
|
||||||
// get date without time
|
// get date without time
|
||||||
|
@ -186,8 +188,8 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
|
||||||
|| headerName === 'host'
|
|| headerName === 'host'
|
||||||
).sort().join(';');
|
).sort().join(';');
|
||||||
const params = { request, signedHeaders, payloadChecksum,
|
const params = { request, signedHeaders, payloadChecksum,
|
||||||
credentialScope, timestamp, query: data,
|
credentialScope, timestamp, query: data,
|
||||||
awsService: service };
|
awsService: service, proxyPath };
|
||||||
const stringToSign = constructStringToSignV4(params);
|
const stringToSign = constructStringToSignV4(params);
|
||||||
const signingKey = vaultUtilities.calculateSigningKey(secretKeyValue,
|
const signingKey = vaultUtilities.calculateSigningKey(secretKeyValue,
|
||||||
region,
|
region,
|
||||||
|
@ -214,6 +216,7 @@ module.exports = {
|
||||||
inMemory: {
|
inMemory: {
|
||||||
backend,
|
backend,
|
||||||
validateAuthConfig,
|
validateAuthConfig,
|
||||||
|
AuthLoader,
|
||||||
},
|
},
|
||||||
AuthInfo,
|
AuthInfo,
|
||||||
Vault,
|
Vault,
|
||||||
|
|
|
@ -0,0 +1,223 @@
|
||||||
|
const fs = require('fs');
|
||||||
|
const glob = require('simple-glob');
|
||||||
|
const joi = require('joi');
|
||||||
|
const werelogs = require('werelogs');
|
||||||
|
|
||||||
|
const ARN = require('../../models/ARN');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Load authentication information from files or pre-loaded account
|
||||||
|
* objects
|
||||||
|
*
|
||||||
|
* @class AuthLoader
|
||||||
|
*/
|
||||||
|
class AuthLoader {
|
||||||
|
constructor(logApi) {
|
||||||
|
this._log = new (logApi || werelogs).Logger('S3');
|
||||||
|
this._authData = { accounts: [] };
|
||||||
|
// null: unknown validity, true/false: valid or invalid
|
||||||
|
this._isValid = null;
|
||||||
|
|
||||||
|
this._joiKeysValidator = joi.array()
|
||||||
|
.items({
|
||||||
|
access: joi.string().required(),
|
||||||
|
secret: joi.string().required(),
|
||||||
|
})
|
||||||
|
.required();
|
||||||
|
|
||||||
|
const accountsJoi = joi.array()
|
||||||
|
.items({
|
||||||
|
name: joi.string().required(),
|
||||||
|
email: joi.string().email().required(),
|
||||||
|
arn: joi.string().required(),
|
||||||
|
canonicalID: joi.string().required(),
|
||||||
|
shortid: joi.string().regex(/^[0-9]{12}$/).required(),
|
||||||
|
keys: this._joiKeysValidator,
|
||||||
|
// backward-compat
|
||||||
|
users: joi.array(),
|
||||||
|
})
|
||||||
|
.required()
|
||||||
|
.unique('arn')
|
||||||
|
.unique('email')
|
||||||
|
.unique('canonicalID');
|
||||||
|
this._joiValidator = joi.object({ accounts: accountsJoi });
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* add one or more accounts to the authentication info
|
||||||
|
*
|
||||||
|
* @param {object} authData - authentication data
|
||||||
|
* @param {object[]} authData.accounts - array of account data
|
||||||
|
* @param {string} authData.accounts[].name - account name
|
||||||
|
* @param {string} authData.accounts[].email: email address
|
||||||
|
* @param {string} authData.accounts[].arn: account ARN,
|
||||||
|
* e.g. 'arn:aws:iam::123456789012:root'
|
||||||
|
* @param {string} authData.accounts[].canonicalID account
|
||||||
|
* canonical ID
|
||||||
|
* @param {string} authData.accounts[].shortid account ID number,
|
||||||
|
* e.g. '123456789012'
|
||||||
|
* @param {object[]} authData.accounts[].keys array of
|
||||||
|
* access/secret keys
|
||||||
|
* @param {object[]} authData.accounts[].keys[].access access key
|
||||||
|
* @param {object[]} authData.accounts[].keys[].secret secret key
|
||||||
|
* @param {string} [filePath] - optional file path info for
|
||||||
|
* logging purpose
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
addAccounts(authData, filePath) {
|
||||||
|
const isValid = this._validateData(authData, filePath);
|
||||||
|
if (isValid) {
|
||||||
|
this._authData.accounts =
|
||||||
|
this._authData.accounts.concat(authData.accounts);
|
||||||
|
// defer validity checking when getting data to avoid
|
||||||
|
// logging multiple times the errors (we need to validate
|
||||||
|
// all accounts at once to detect duplicate values)
|
||||||
|
if (this._isValid) {
|
||||||
|
this._isValid = null;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
this._isValid = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* add account information from a file
|
||||||
|
*
|
||||||
|
* @param {string} filePath - file path containing JSON
|
||||||
|
* authentication info (see {@link addAccounts()} for format)
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
addFile(filePath) {
|
||||||
|
const authData = JSON.parse(fs.readFileSync(filePath));
|
||||||
|
this.addAccounts(authData, filePath);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* add account information from a filesystem path
|
||||||
|
*
|
||||||
|
* @param {string|string[]} globPattern - filesystem glob pattern,
|
||||||
|
* can be a single string or an array of glob patterns. Globs
|
||||||
|
* can be simple file paths or can contain glob matching
|
||||||
|
* characters, like '/a/b/*.json'. The matching files are
|
||||||
|
* individually loaded as JSON and accounts are added. See
|
||||||
|
* {@link addAccounts()} for JSON format.
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
addFilesByGlob(globPattern) {
|
||||||
|
const files = glob(globPattern);
|
||||||
|
files.forEach(filePath => this.addFile(filePath));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* perform validation on authentication info previously
|
||||||
|
* loaded. Note that it has to be done on the entire set after an
|
||||||
|
* update to catch duplicate account IDs or access keys.
|
||||||
|
*
|
||||||
|
* @return {boolean} true if authentication info is valid
|
||||||
|
* false otherwise
|
||||||
|
*/
|
||||||
|
validate() {
|
||||||
|
if (this._isValid === null) {
|
||||||
|
this._isValid = this._validateData(this._authData);
|
||||||
|
}
|
||||||
|
return this._isValid;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* get authentication info as a plain JS object containing all accounts
|
||||||
|
* under the "accounts" attribute, with validation.
|
||||||
|
*
|
||||||
|
* @return {object|null} the validated authentication data
|
||||||
|
* null if invalid
|
||||||
|
*/
|
||||||
|
getData() {
|
||||||
|
return this.validate() ? this._authData : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
_validateData(authData, filePath) {
|
||||||
|
const res = joi.validate(authData, this._joiValidator,
|
||||||
|
{ abortEarly: false });
|
||||||
|
if (res.error) {
|
||||||
|
this._dumpJoiErrors(res.error.details, filePath);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
let allKeys = [];
|
||||||
|
let arnError = false;
|
||||||
|
const validatedAuth = res.value;
|
||||||
|
validatedAuth.accounts.forEach(account => {
|
||||||
|
// backward-compat: ignore arn if starts with 'aws:' and log a
|
||||||
|
// warning
|
||||||
|
if (account.arn.startsWith('aws:')) {
|
||||||
|
this._log.error(
|
||||||
|
'account must have a valid AWS ARN, legacy examples ' +
|
||||||
|
'starting with \'aws:\' are not supported anymore. ' +
|
||||||
|
'Please convert to a proper account entry (see ' +
|
||||||
|
'examples at https://github.com/scality/S3/blob/' +
|
||||||
|
'master/conf/authdata.json). Also note that support ' +
|
||||||
|
'for account users has been dropped.',
|
||||||
|
{ accountName: account.name, accountArn: account.arn,
|
||||||
|
filePath });
|
||||||
|
arnError = true;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (account.users) {
|
||||||
|
this._log.error(
|
||||||
|
'support for account users has been dropped, consider ' +
|
||||||
|
'turning users into account entries (see examples at ' +
|
||||||
|
'https://github.com/scality/S3/blob/master/conf/' +
|
||||||
|
'authdata.json)',
|
||||||
|
{ accountName: account.name, accountArn: account.arn,
|
||||||
|
filePath });
|
||||||
|
arnError = true;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const arnObj = ARN.createFromString(account.arn);
|
||||||
|
if (arnObj.error) {
|
||||||
|
this._log.error(
|
||||||
|
'authentication config validation error',
|
||||||
|
{ reason: arnObj.error.description,
|
||||||
|
accountName: account.name, accountArn: account.arn,
|
||||||
|
filePath });
|
||||||
|
arnError = true;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (!arnObj.isIAMAccount()) {
|
||||||
|
this._log.error(
|
||||||
|
'authentication config validation error',
|
||||||
|
{ reason: 'not an IAM account ARN',
|
||||||
|
accountName: account.name, accountArn: account.arn,
|
||||||
|
filePath });
|
||||||
|
arnError = true;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
allKeys = allKeys.concat(account.keys);
|
||||||
|
});
|
||||||
|
if (arnError) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
const uniqueKeysRes = joi.validate(
|
||||||
|
allKeys, this._joiKeysValidator.unique('access'));
|
||||||
|
if (uniqueKeysRes.error) {
|
||||||
|
this._dumpJoiErrors(uniqueKeysRes.error.details, filePath);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
_dumpJoiErrors(errors, filePath) {
|
||||||
|
errors.forEach(err => {
|
||||||
|
const logInfo = { item: err.path, filePath };
|
||||||
|
if (err.type === 'array.unique') {
|
||||||
|
logInfo.reason = `duplicate value '${err.context.path}'`;
|
||||||
|
logInfo.dupValue = err.context.value[err.context.path];
|
||||||
|
} else {
|
||||||
|
logInfo.reason = err.message;
|
||||||
|
logInfo.context = err.context;
|
||||||
|
}
|
||||||
|
this._log.error('authentication config validation error',
|
||||||
|
logInfo);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = AuthLoader;
|
|
@ -7,10 +7,6 @@ const calculateSigningKey = require('./vaultUtilities').calculateSigningKey;
|
||||||
const hashSignature = require('./vaultUtilities').hashSignature;
|
const hashSignature = require('./vaultUtilities').hashSignature;
|
||||||
const Indexer = require('./Indexer');
|
const Indexer = require('./Indexer');
|
||||||
|
|
||||||
function _buildArn(service, generalResource, specificResource) {
|
|
||||||
return `arn:aws:${service}:::${generalResource}/${specificResource}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
function _formatResponse(userInfoToSend) {
|
function _formatResponse(userInfoToSend) {
|
||||||
return {
|
return {
|
||||||
message: {
|
message: {
|
||||||
|
@ -42,7 +38,7 @@ class Backend {
|
||||||
/** verifySignatureV2
|
/** verifySignatureV2
|
||||||
* @param {string} stringToSign - string to sign built per AWS rules
|
* @param {string} stringToSign - string to sign built per AWS rules
|
||||||
* @param {string} signatureFromRequest - signature sent with request
|
* @param {string} signatureFromRequest - signature sent with request
|
||||||
* @param {string} accessKey - user's accessKey
|
* @param {string} accessKey - account accessKey
|
||||||
* @param {object} options - contains algorithm (SHA1 or SHA256)
|
* @param {object} options - contains algorithm (SHA1 or SHA256)
|
||||||
* @param {function} callback - callback with either error or user info
|
* @param {function} callback - callback with either error or user info
|
||||||
* @return {function} calls callback
|
* @return {function} calls callback
|
||||||
|
@ -73,7 +69,7 @@ class Backend {
|
||||||
/** verifySignatureV4
|
/** verifySignatureV4
|
||||||
* @param {string} stringToSign - string to sign built per AWS rules
|
* @param {string} stringToSign - string to sign built per AWS rules
|
||||||
* @param {string} signatureFromRequest - signature sent with request
|
* @param {string} signatureFromRequest - signature sent with request
|
||||||
* @param {string} accessKey - user's accessKey
|
* @param {string} accessKey - account accessKey
|
||||||
* @param {string} region - region specified in request credential
|
* @param {string} region - region specified in request credential
|
||||||
* @param {string} scopeDate - date specified in request credential
|
* @param {string} scopeDate - date specified in request credential
|
||||||
* @param {object} options - options to send to Vault
|
* @param {object} options - options to send to Vault
|
||||||
|
@ -161,55 +157,6 @@ class Backend {
|
||||||
};
|
};
|
||||||
return cb(null, vaultReturnObject);
|
return cb(null, vaultReturnObject);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Mocks Vault's response to a policy evaluation request
|
|
||||||
* Since policies not actually implemented in memory backend,
|
|
||||||
* we allow users to proceed with request.
|
|
||||||
* @param {object} requestContextParams - parameters needed to construct
|
|
||||||
* requestContext in Vault
|
|
||||||
* @param {object} requestContextParams.constantParams -
|
|
||||||
* params that have the
|
|
||||||
* same value for each requestContext to be constructed in Vault
|
|
||||||
* @param {object} requestContextParams.paramaterize - params that have
|
|
||||||
* arrays as values since a requestContext needs to be constructed with
|
|
||||||
* each option in Vault
|
|
||||||
* @param {object[]} requestContextParams.paramaterize.specificResource -
|
|
||||||
* specific resources paramaterized as an array of objects containing
|
|
||||||
* properties `key` and optional `versionId`
|
|
||||||
* @param {string} userArn - arn of requesting user
|
|
||||||
* @param {object} log - log object
|
|
||||||
* @param {function} cb - callback with either error or an array
|
|
||||||
* of authorization results
|
|
||||||
* @returns {undefined}
|
|
||||||
* @callback called with (err, vaultReturnObject)
|
|
||||||
*/
|
|
||||||
checkPolicies(requestContextParams, userArn, log, cb) {
|
|
||||||
let results;
|
|
||||||
const parameterizeParams = requestContextParams.parameterize;
|
|
||||||
if (parameterizeParams && parameterizeParams.specificResource) {
|
|
||||||
// object is parameterized
|
|
||||||
results = parameterizeParams.specificResource.map(obj => ({
|
|
||||||
isAllowed: true,
|
|
||||||
arn: _buildArn(this.service, requestContextParams
|
|
||||||
.constantParams.generalResource, obj.key),
|
|
||||||
versionId: obj.versionId,
|
|
||||||
}));
|
|
||||||
} else {
|
|
||||||
results = [{
|
|
||||||
isAllowed: true,
|
|
||||||
arn: _buildArn(this.service, requestContextParams
|
|
||||||
.constantParams.generalResource, requestContextParams
|
|
||||||
.constantParams.specificResource),
|
|
||||||
}];
|
|
||||||
}
|
|
||||||
const vaultReturnObject = {
|
|
||||||
message: {
|
|
||||||
body: results,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
return cb(null, vaultReturnObject);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -226,9 +173,6 @@ class S3AuthBackend extends Backend {
|
||||||
* @param {object[]=} authdata.accounts[].keys - array of key objects
|
* @param {object[]=} authdata.accounts[].keys - array of key objects
|
||||||
* @param {string} authdata.accounts[].keys[].access - access key
|
* @param {string} authdata.accounts[].keys[].access - access key
|
||||||
* @param {string} authdata.accounts[].keys[].secret - secret key
|
* @param {string} authdata.accounts[].keys[].secret - secret key
|
||||||
* @param {object[]=} authdata.accounts[].users - array of user objects:
|
|
||||||
* note, same properties as account except no canonical ID / sas token
|
|
||||||
* @param {string=} authdata.accounts[].sasToken - Azure SAS token
|
|
||||||
* @return {undefined}
|
* @return {undefined}
|
||||||
*/
|
*/
|
||||||
constructor(authdata) {
|
constructor(authdata) {
|
||||||
|
|
|
@ -19,9 +19,6 @@ class Indexer {
|
||||||
* @param {object[]=} authdata.accounts[].keys - array of key objects
|
* @param {object[]=} authdata.accounts[].keys - array of key objects
|
||||||
* @param {string} authdata.accounts[].keys[].access - access key
|
* @param {string} authdata.accounts[].keys[].access - access key
|
||||||
* @param {string} authdata.accounts[].keys[].secret - secret key
|
* @param {string} authdata.accounts[].keys[].secret - secret key
|
||||||
* @param {object[]=} authdata.accounts[].users - array of user objects:
|
|
||||||
* note, same properties as account except no canonical ID / sas token
|
|
||||||
* @param {string=} authdata.accounts[].sasToken - Azure SAS token
|
|
||||||
* @return {undefined}
|
* @return {undefined}
|
||||||
*/
|
*/
|
||||||
constructor(authdata) {
|
constructor(authdata) {
|
||||||
|
@ -30,10 +27,6 @@ class Indexer {
|
||||||
accessKey: {},
|
accessKey: {},
|
||||||
email: {},
|
email: {},
|
||||||
};
|
};
|
||||||
this.usersBy = {
|
|
||||||
accessKey: {},
|
|
||||||
email: {},
|
|
||||||
};
|
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* This may happen if the application is configured to use another
|
* This may happen if the application is configured to use another
|
||||||
|
@ -47,23 +40,6 @@ class Indexer {
|
||||||
this._build(authdata);
|
this._build(authdata);
|
||||||
}
|
}
|
||||||
|
|
||||||
_indexUser(account, user) {
|
|
||||||
const userData = {
|
|
||||||
arn: account.arn,
|
|
||||||
canonicalID: account.canonicalID,
|
|
||||||
shortid: account.shortid,
|
|
||||||
accountDisplayName: account.accountDisplayName,
|
|
||||||
IAMdisplayName: user.name,
|
|
||||||
email: user.email.toLowerCase(),
|
|
||||||
keys: [],
|
|
||||||
};
|
|
||||||
this.usersBy.email[userData.email] = userData;
|
|
||||||
user.keys.forEach(key => {
|
|
||||||
userData.keys.push(key);
|
|
||||||
this.usersBy.accessKey[key.access] = userData;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
_indexAccount(account) {
|
_indexAccount(account) {
|
||||||
const accountData = {
|
const accountData = {
|
||||||
arn: account.arn,
|
arn: account.arn,
|
||||||
|
@ -81,11 +57,6 @@ class Indexer {
|
||||||
this.accountsBy.accessKey[key.access] = accountData;
|
this.accountsBy.accessKey[key.access] = accountData;
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
if (account.users !== undefined) {
|
|
||||||
account.users.forEach(user => {
|
|
||||||
this._indexUser(accountData, user);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
_build(authdata) {
|
_build(authdata) {
|
||||||
|
@ -126,10 +97,7 @@ class Indexer {
|
||||||
* @return {Object} entity.email - The entity's lowercased email
|
* @return {Object} entity.email - The entity's lowercased email
|
||||||
*/
|
*/
|
||||||
getEntityByKey(key) {
|
getEntityByKey(key) {
|
||||||
if (this.accountsBy.accessKey.hasOwnProperty(key)) {
|
return this.accountsBy.accessKey[key];
|
||||||
return this.accountsBy.accessKey[key];
|
|
||||||
}
|
|
||||||
return this.usersBy.accessKey[key];
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -150,9 +118,6 @@ class Indexer {
|
||||||
*/
|
*/
|
||||||
getEntityByEmail(email) {
|
getEntityByEmail(email) {
|
||||||
const lowerCasedEmail = email.toLowerCase();
|
const lowerCasedEmail = email.toLowerCase();
|
||||||
if (this.usersBy.email.hasOwnProperty(lowerCasedEmail)) {
|
|
||||||
return this.usersBy.email[lowerCasedEmail];
|
|
||||||
}
|
|
||||||
return this.accountsBy.email[lowerCasedEmail];
|
return this.accountsBy.email[lowerCasedEmail];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,194 +1,18 @@
|
||||||
const werelogs = require('werelogs');
|
const AuthLoader = require('./AuthLoader');
|
||||||
|
|
||||||
function _incr(count) {
|
|
||||||
if (count !== undefined) {
|
|
||||||
return count + 1;
|
|
||||||
}
|
|
||||||
return 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This function ensures that the field `name` inside `container` is of the
|
* @deprecated please use {@link AuthLoader} class instead
|
||||||
* expected `type` inside `obj`. If any error is found, an entry is added into
|
|
||||||
* the error collector object.
|
|
||||||
*
|
*
|
||||||
* @param {object} data - the error collector object
|
|
||||||
* @param {string} container - the name of the entity that contains
|
|
||||||
* what we're checking
|
|
||||||
* @param {string} name - the name of the entity we're checking for
|
|
||||||
* @param {string} type - expected typename of the entity we're checking
|
|
||||||
* @param {object} obj - the object we're checking the fields of
|
|
||||||
* @return {boolean} true if the type is Ok and no error found
|
|
||||||
* false if an error was found and reported
|
|
||||||
*/
|
|
||||||
function _checkType(data, container, name, type, obj) {
|
|
||||||
if ((type === 'array' && !Array.isArray(obj[name]))
|
|
||||||
|| (type !== 'array' && typeof obj[name] !== type)) {
|
|
||||||
data.errors.push({
|
|
||||||
txt: 'property is not of the expected type',
|
|
||||||
obj: {
|
|
||||||
entity: container,
|
|
||||||
property: name,
|
|
||||||
type: typeof obj[name],
|
|
||||||
expectedType: type,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* This function ensures that the field `name` inside `obj` which is a
|
|
||||||
* `container`. If any error is found, an entry is added into the error
|
|
||||||
* collector object.
|
|
||||||
*
|
|
||||||
* @param {object} data - the error collector object
|
|
||||||
* @param {string} container - the name of the entity that contains
|
|
||||||
* what we're checking
|
|
||||||
* @param {string} name - the name of the entity we're checking for
|
|
||||||
* @param {string} type - expected typename of the entity we're checking
|
|
||||||
* @param {object} obj - the object we're checking the fields of
|
|
||||||
* @return {boolean} true if the field exists and type is Ok
|
|
||||||
* false if an error was found and reported
|
|
||||||
*/
|
|
||||||
function _checkExists(data, container, name, type, obj) {
|
|
||||||
if (obj[name] === undefined) {
|
|
||||||
data.errors.push({
|
|
||||||
txt: 'missing property in auth entity',
|
|
||||||
obj: {
|
|
||||||
entity: container,
|
|
||||||
property: name,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
return _checkType(data, container, name, type, obj);
|
|
||||||
}
|
|
||||||
|
|
||||||
function _checkUser(data, userObj) {
|
|
||||||
if (_checkExists(data, 'User', 'arn', 'string', userObj)) {
|
|
||||||
// eslint-disable-next-line no-param-reassign
|
|
||||||
data.arns[userObj.arn] = _incr(data.arns[userObj.arn]);
|
|
||||||
}
|
|
||||||
if (_checkExists(data, 'User', 'email', 'string', userObj)) {
|
|
||||||
// eslint-disable-next-line no-param-reassign
|
|
||||||
data.emails[userObj.email] = _incr(data.emails[userObj.email]);
|
|
||||||
}
|
|
||||||
if (_checkExists(data, 'User', 'keys', 'array', userObj)) {
|
|
||||||
userObj.keys.forEach(keyObj => {
|
|
||||||
// eslint-disable-next-line no-param-reassign
|
|
||||||
data.keys[keyObj.access] = _incr(data.keys[keyObj.access]);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function _checkAccount(data, accountObj, checkSas) {
|
|
||||||
if (_checkExists(data, 'Account', 'email', 'string', accountObj)) {
|
|
||||||
// eslint-disable-next-line no-param-reassign
|
|
||||||
data.emails[accountObj.email] = _incr(data.emails[accountObj.email]);
|
|
||||||
}
|
|
||||||
if (_checkExists(data, 'Account', 'arn', 'string', accountObj)) {
|
|
||||||
// eslint-disable-next-line no-param-reassign
|
|
||||||
data.arns[accountObj.arn] = _incr(data.arns[accountObj.arn]);
|
|
||||||
}
|
|
||||||
if (_checkExists(data, 'Account', 'canonicalID', 'string', accountObj)) {
|
|
||||||
// eslint-disable-next-line no-param-reassign
|
|
||||||
data.canonicalIds[accountObj.canonicalID] =
|
|
||||||
_incr(data.canonicalIds[accountObj.canonicalID]);
|
|
||||||
}
|
|
||||||
if (checkSas &&
|
|
||||||
_checkExists(data, 'Account', 'sasToken', 'string', accountObj)) {
|
|
||||||
// eslint-disable-next-line no-param-reassign
|
|
||||||
data.sasTokens[accountObj.sasToken] =
|
|
||||||
_incr(data.sasTokens[accountObj.sasToken]);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (accountObj.users) {
|
|
||||||
if (_checkType(data, 'Account', 'users', 'array', accountObj)) {
|
|
||||||
accountObj.users.forEach(userObj => _checkUser(data, userObj));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (accountObj.keys) {
|
|
||||||
if (_checkType(data, 'Account', 'keys', 'array', accountObj)) {
|
|
||||||
accountObj.keys.forEach(keyObj => {
|
|
||||||
// eslint-disable-next-line no-param-reassign
|
|
||||||
data.keys[keyObj.access] = _incr(data.keys[keyObj.access]);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function _dumpCountError(property, obj, log) {
|
|
||||||
let count = 0;
|
|
||||||
Object.keys(obj).forEach(key => {
|
|
||||||
if (obj[key] > 1) {
|
|
||||||
log.error('property should be unique', {
|
|
||||||
property,
|
|
||||||
value: key,
|
|
||||||
count: obj[key],
|
|
||||||
});
|
|
||||||
++count;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
return count;
|
|
||||||
}
|
|
||||||
|
|
||||||
function _dumpErrors(checkData, log) {
|
|
||||||
let nerr = _dumpCountError('CanonicalID', checkData.canonicalIds, log);
|
|
||||||
nerr += _dumpCountError('Email', checkData.emails, log);
|
|
||||||
nerr += _dumpCountError('ARN', checkData.arns, log);
|
|
||||||
nerr += _dumpCountError('AccessKey', checkData.keys, log);
|
|
||||||
nerr += _dumpCountError('SAS Token', checkData.sasTokens, log);
|
|
||||||
|
|
||||||
if (checkData.errors.length > 0) {
|
|
||||||
checkData.errors.forEach(msg => {
|
|
||||||
log.error(msg.txt, msg.obj);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
if (checkData.errors.length === 0 && nerr === 0) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
log.fatal('invalid authentication config file (cannot start)');
|
|
||||||
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {object} authdata - the authentication config file's data
|
* @param {object} authdata - the authentication config file's data
|
||||||
* @param {werelogs.API} logApi - object providing a constructor function
|
* @param {werelogs.API} logApi - object providing a constructor function
|
||||||
* for the Logger object
|
* for the Logger object
|
||||||
* @param {(boolean|null)} checkSas - whether to check Azure SAS for ea. account
|
|
||||||
* @return {boolean} true on erroneous data
|
* @return {boolean} true on erroneous data
|
||||||
* false on success
|
* false on success
|
||||||
*/
|
*/
|
||||||
function validateAuthConfig(authdata, logApi, checkSas) {
|
function validateAuthConfig(authdata, logApi) {
|
||||||
const checkData = {
|
const authLoader = new AuthLoader(logApi);
|
||||||
errors: [],
|
authLoader.addAccounts(authdata);
|
||||||
emails: [],
|
return !authLoader.validate();
|
||||||
arns: [],
|
|
||||||
canonicalIds: [],
|
|
||||||
keys: [],
|
|
||||||
sasTokens: [],
|
|
||||||
};
|
|
||||||
const log = new (logApi || werelogs).Logger('S3');
|
|
||||||
|
|
||||||
|
|
||||||
if (authdata.accounts === undefined) {
|
|
||||||
checkData.errors.push({
|
|
||||||
txt: 'no "accounts" array defined in Auth config',
|
|
||||||
});
|
|
||||||
return _dumpErrors(checkData, log);
|
|
||||||
}
|
|
||||||
|
|
||||||
authdata.accounts.forEach(account => {
|
|
||||||
_checkAccount(checkData, account, checkSas);
|
|
||||||
});
|
|
||||||
|
|
||||||
return _dumpErrors(checkData, log);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = validateAuthConfig;
|
module.exports = validateAuthConfig;
|
||||||
|
|
|
@ -27,7 +27,7 @@ function check(request, log, data) {
|
||||||
milliseconds to compare to Date.now()
|
milliseconds to compare to Date.now()
|
||||||
*/
|
*/
|
||||||
const expirationTime = parseInt(data.Expires, 10) * 1000;
|
const expirationTime = parseInt(data.Expires, 10) * 1000;
|
||||||
if (isNaN(expirationTime)) {
|
if (Number.isNaN(expirationTime)) {
|
||||||
log.debug('invalid expires parameter',
|
log.debug('invalid expires parameter',
|
||||||
{ expires: data.Expires });
|
{ expires: data.Expires });
|
||||||
return { err: errors.MissingSecurityHeader };
|
return { err: errors.MissingSecurityHeader };
|
||||||
|
|
|
@ -10,17 +10,13 @@ const createCanonicalRequest = require('./createCanonicalRequest');
|
||||||
* @returns {string} - stringToSign
|
* @returns {string} - stringToSign
|
||||||
*/
|
*/
|
||||||
function constructStringToSign(params) {
|
function constructStringToSign(params) {
|
||||||
const request = params.request;
|
const { request, signedHeaders, payloadChecksum, credentialScope, timestamp,
|
||||||
const signedHeaders = params.signedHeaders;
|
query, log, proxyPath } = params;
|
||||||
const payloadChecksum = params.payloadChecksum;
|
const path = proxyPath || request.path;
|
||||||
const credentialScope = params.credentialScope;
|
|
||||||
const timestamp = params.timestamp;
|
|
||||||
const query = params.query;
|
|
||||||
const log = params.log;
|
|
||||||
|
|
||||||
const canonicalReqResult = createCanonicalRequest({
|
const canonicalReqResult = createCanonicalRequest({
|
||||||
pHttpVerb: request.method,
|
pHttpVerb: request.method,
|
||||||
pResource: request.path,
|
pResource: path,
|
||||||
pQuery: query,
|
pQuery: query,
|
||||||
pHeaders: request.headers,
|
pHeaders: request.headers,
|
||||||
pSignedHeaders: signedHeaders,
|
pSignedHeaders: signedHeaders,
|
||||||
|
|
|
@ -40,7 +40,8 @@ function createCanonicalRequest(params) {
|
||||||
|
|
||||||
// canonical query string
|
// canonical query string
|
||||||
let canonicalQueryStr = '';
|
let canonicalQueryStr = '';
|
||||||
if (pQuery && !((service === 'iam' || service === 'ring') &&
|
if (pQuery && !((service === 'iam' || service === 'ring' ||
|
||||||
|
service === 'sts') &&
|
||||||
pHttpVerb === 'POST')) {
|
pHttpVerb === 'POST')) {
|
||||||
const sortedQueryParams = Object.keys(pQuery).sort().map(key => {
|
const sortedQueryParams = Object.keys(pQuery).sort().map(key => {
|
||||||
const encodedKey = awsURIencode(key);
|
const encodedKey = awsURIencode(key);
|
||||||
|
|
|
@ -98,7 +98,7 @@ function check(request, log, data, awsService) {
|
||||||
log);
|
log);
|
||||||
if (validationResult instanceof Error) {
|
if (validationResult instanceof Error) {
|
||||||
log.debug('credentials in improper format', { credentialsArr,
|
log.debug('credentials in improper format', { credentialsArr,
|
||||||
timestamp, validationResult });
|
timestamp, validationResult });
|
||||||
return { err: validationResult };
|
return { err: validationResult };
|
||||||
}
|
}
|
||||||
// credentialsArr is [accessKey, date, region, aws-service, aws4_request]
|
// credentialsArr is [accessKey, date, region, aws-service, aws4_request]
|
||||||
|
|
|
@ -48,7 +48,7 @@ function check(request, log, data) {
|
||||||
log);
|
log);
|
||||||
if (validationResult instanceof Error) {
|
if (validationResult instanceof Error) {
|
||||||
log.debug('credentials in improper format', { credential,
|
log.debug('credentials in improper format', { credential,
|
||||||
timestamp, validationResult });
|
timestamp, validationResult });
|
||||||
return { err: validationResult };
|
return { err: validationResult };
|
||||||
}
|
}
|
||||||
const accessKey = credential[0];
|
const accessKey = credential[0];
|
||||||
|
|
|
@ -41,8 +41,9 @@ function validateCredentials(credentials, timestamp, log) {
|
||||||
{ scopeDate, timestampDate });
|
{ scopeDate, timestampDate });
|
||||||
return errors.RequestTimeTooSkewed;
|
return errors.RequestTimeTooSkewed;
|
||||||
}
|
}
|
||||||
if (service !== 's3' && service !== 'iam' && service !== 'ring') {
|
if (service !== 's3' && service !== 'iam' && service !== 'ring' &&
|
||||||
log.warn('service in credentials is not one of s3/iam/ring', {
|
service !== 'sts') {
|
||||||
|
log.warn('service in credentials is not one of s3/iam/ring/sts', {
|
||||||
service,
|
service,
|
||||||
});
|
});
|
||||||
return errors.InvalidArgument;
|
return errors.InvalidArgument;
|
||||||
|
|
|
@ -20,6 +20,7 @@ module.exports = {
|
||||||
// no authentication information. Requestor can access
|
// no authentication information. Requestor can access
|
||||||
// only public resources
|
// only public resources
|
||||||
publicId: 'http://acs.amazonaws.com/groups/global/AllUsers',
|
publicId: 'http://acs.amazonaws.com/groups/global/AllUsers',
|
||||||
|
zenkoServiceAccount: 'http://acs.zenko.io/accounts/service',
|
||||||
metadataFileNamespace: '/MDFile',
|
metadataFileNamespace: '/MDFile',
|
||||||
dataFileURL: '/DataFile',
|
dataFileURL: '/DataFile',
|
||||||
// AWS states max size for user-defined metadata
|
// AWS states max size for user-defined metadata
|
||||||
|
@ -29,4 +30,41 @@ module.exports = {
|
||||||
// so we do the same.
|
// so we do the same.
|
||||||
maximumMetaHeadersSize: 2136,
|
maximumMetaHeadersSize: 2136,
|
||||||
emptyFileMd5: 'd41d8cd98f00b204e9800998ecf8427e',
|
emptyFileMd5: 'd41d8cd98f00b204e9800998ecf8427e',
|
||||||
|
// Version 2 changes the format of the data location property
|
||||||
|
// Version 3 adds the dataStoreName attribute
|
||||||
|
mdModelVersion: 3,
|
||||||
|
/*
|
||||||
|
* Splitter is used to build the object name for the overview of a
|
||||||
|
* multipart upload and to build the object names for each part of a
|
||||||
|
* multipart upload. These objects with large names are then stored in
|
||||||
|
* metadata in a "shadow bucket" to a real bucket. The shadow bucket
|
||||||
|
* contains all ongoing multipart uploads. We include in the object
|
||||||
|
* name some of the info we might need to pull about an open multipart
|
||||||
|
* upload or about an individual part with each piece of info separated
|
||||||
|
* by the splitter. We can then extract each piece of info by splitting
|
||||||
|
* the object name string with this splitter.
|
||||||
|
* For instance, assuming a splitter of '...!*!',
|
||||||
|
* the name of the upload overview would be:
|
||||||
|
* overview...!*!objectKey...!*!uploadId
|
||||||
|
* For instance, the name of a part would be:
|
||||||
|
* uploadId...!*!partNumber
|
||||||
|
*
|
||||||
|
* The sequence of characters used in the splitter should not occur
|
||||||
|
* elsewhere in the pieces of info to avoid splitting where not
|
||||||
|
* intended.
|
||||||
|
*
|
||||||
|
* Splitter is also used in adding bucketnames to the
|
||||||
|
* namespacerusersbucket. The object names added to the
|
||||||
|
* namespaceusersbucket are of the form:
|
||||||
|
* canonicalID...!*!bucketname
|
||||||
|
*/
|
||||||
|
|
||||||
|
splitter: '..|..',
|
||||||
|
usersBucket: 'users..bucket',
|
||||||
|
// MPU Bucket Prefix is used to create the name of the shadow
|
||||||
|
// bucket used for multipart uploads. There is one shadow mpu
|
||||||
|
// bucket per bucket and its name is the mpuBucketPrefix followed
|
||||||
|
// by the name of the final destination bucket for the object
|
||||||
|
// once the multipart upload is complete.
|
||||||
|
mpuBucketPrefix: 'mpuShadowBucket',
|
||||||
};
|
};
|
||||||
|
|
|
@ -0,0 +1,54 @@
|
||||||
|
const Redis = require('ioredis');
|
||||||
|
|
||||||
|
class RedisClient {
|
||||||
|
/**
|
||||||
|
* @constructor
|
||||||
|
* @param {Object} config - config
|
||||||
|
* @param {string} config.host - Redis host
|
||||||
|
* @param {number} config.port - Redis port
|
||||||
|
* @param {string} config.password - Redis password
|
||||||
|
* @param {werelogs.Logger} logger - logger instance
|
||||||
|
*/
|
||||||
|
constructor(config, logger) {
|
||||||
|
this._client = new Redis(config);
|
||||||
|
this._client.on('error', err =>
|
||||||
|
logger.trace('error from redis', {
|
||||||
|
error: err,
|
||||||
|
method: 'RedisClient.constructor',
|
||||||
|
redisHost: config.host,
|
||||||
|
redisPort: config.port,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* increment value of a key by 1 and set a ttl
|
||||||
|
* @param {string} key - key holding the value
|
||||||
|
* @param {number} expiry - expiry in seconds
|
||||||
|
* @param {callback} cb - callback
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
incrEx(key, expiry, cb) {
|
||||||
|
return this._client
|
||||||
|
.multi([['incr', key], ['expire', key, expiry]])
|
||||||
|
.exec(cb);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* execute a batch of commands
|
||||||
|
* @param {string[]} cmds - list of commands
|
||||||
|
* @param {callback} cb - callback
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
batch(cmds, cb) {
|
||||||
|
return this._client.pipeline(cmds).exec(cb);
|
||||||
|
}
|
||||||
|
|
||||||
|
clear(cb) {
|
||||||
|
return this._client.flushdb(cb);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = RedisClient;
|
|
@ -0,0 +1,150 @@
|
||||||
|
const async = require('async');
|
||||||
|
|
||||||
|
class StatsClient {
|
||||||
|
/**
|
||||||
|
* @constructor
|
||||||
|
* @param {object} redisClient - RedisClient instance
|
||||||
|
* @param {number} interval - sampling interval by seconds
|
||||||
|
* @param {number} expiry - sampling duration by seconds
|
||||||
|
*/
|
||||||
|
constructor(redisClient, interval, expiry) {
|
||||||
|
this._redis = redisClient;
|
||||||
|
this._interval = interval;
|
||||||
|
this._expiry = expiry;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Utility function to use when callback is undefined
|
||||||
|
*/
|
||||||
|
_noop() {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* normalize to the nearest interval
|
||||||
|
* @param {object} d - Date instance
|
||||||
|
* @return {number} timestamp - normalized to the nearest interval
|
||||||
|
*/
|
||||||
|
_normalizeTimestamp(d) {
|
||||||
|
const s = d.getSeconds();
|
||||||
|
return d.setSeconds(s - s % this._interval, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* set timestamp to the previous interval
|
||||||
|
* @param {object} d - Date instance
|
||||||
|
* @return {number} timestamp - set to the previous interval
|
||||||
|
*/
|
||||||
|
_setPrevInterval(d) {
|
||||||
|
return d.setSeconds(d.getSeconds() - this._interval);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* build redis key to get total number of occurrences on the server
|
||||||
|
* @param {string} name - key name identifier
|
||||||
|
* @param {object} d - Date instance
|
||||||
|
* @return {string} key - key for redis
|
||||||
|
*/
|
||||||
|
_buildKey(name, d) {
|
||||||
|
return `${name}:${this._normalizeTimestamp(d)}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* reduce the array of values to a single value
|
||||||
|
* typical input looks like [[null, '1'], [null, '2'], [null, null]...]
|
||||||
|
* @param {array} arr - Date instance
|
||||||
|
* @return {string} key - key for redis
|
||||||
|
*/
|
||||||
|
_getCount(arr) {
|
||||||
|
return arr.reduce((prev, a) => {
|
||||||
|
let num = parseInt(a[1], 10);
|
||||||
|
num = Number.isNaN(num) ? 0 : num;
|
||||||
|
return prev + num;
|
||||||
|
}, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* report/record a new request received on the server
|
||||||
|
* @param {string} id - service identifier
|
||||||
|
* @param {callback} cb - callback
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
reportNewRequest(id, cb) {
|
||||||
|
if (!this._redis) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
const callback = cb || this._noop;
|
||||||
|
const key = this._buildKey(`${id}:requests`, new Date());
|
||||||
|
return this._redis.incrEx(key, this._expiry, callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* report/record a request that ended up being a 500 on the server
|
||||||
|
* @param {string} id - service identifier
|
||||||
|
* @param {callback} cb - callback
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
report500(id, cb) {
|
||||||
|
if (!this._redis) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
const callback = cb || this._noop;
|
||||||
|
const key = this._buildKey(`${id}:500s`, new Date());
|
||||||
|
return this._redis.incrEx(key, this._expiry, callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* get stats for the last x seconds, x being the sampling duration
|
||||||
|
* @param {object} log - Werelogs request logger
|
||||||
|
* @param {string} id - service identifier
|
||||||
|
* @param {callback} cb - callback to call with the err/result
|
||||||
|
* @return {undefined}
|
||||||
|
*/
|
||||||
|
getStats(log, id, cb) {
|
||||||
|
if (!this._redis) {
|
||||||
|
return cb(null, {});
|
||||||
|
}
|
||||||
|
const d = new Date();
|
||||||
|
const totalKeys = Math.floor(this._expiry / this._interval);
|
||||||
|
const reqsKeys = [];
|
||||||
|
const req500sKeys = [];
|
||||||
|
for (let i = 0; i < totalKeys; i++) {
|
||||||
|
reqsKeys.push(['get', this._buildKey(`${id}:requests`, d)]);
|
||||||
|
req500sKeys.push(['get', this._buildKey(`${id}:500s`, d)]);
|
||||||
|
this._setPrevInterval(d);
|
||||||
|
}
|
||||||
|
return async.parallel([
|
||||||
|
next => this._redis.batch(reqsKeys, next),
|
||||||
|
next => this._redis.batch(req500sKeys, next),
|
||||||
|
], (err, results) => {
|
||||||
|
/**
|
||||||
|
* Batch result is of the format
|
||||||
|
* [ [null, '1'], [null, '2'], [null, '3'] ] where each
|
||||||
|
* item is the result of the each batch command
|
||||||
|
* Foreach item in the result, index 0 signifies the error and
|
||||||
|
* index 1 contains the result
|
||||||
|
*/
|
||||||
|
const statsRes = {
|
||||||
|
'requests': 0,
|
||||||
|
'500s': 0,
|
||||||
|
'sampleDuration': this._expiry,
|
||||||
|
};
|
||||||
|
if (err) {
|
||||||
|
log.error('error getting stats', {
|
||||||
|
error: err,
|
||||||
|
method: 'StatsClient.getStats',
|
||||||
|
});
|
||||||
|
/**
|
||||||
|
* Redis for stats is not a critial component, ignoring
|
||||||
|
* any error here as returning an InternalError
|
||||||
|
* would be confused with the health of the service
|
||||||
|
*/
|
||||||
|
return cb(null, statsRes);
|
||||||
|
}
|
||||||
|
statsRes.requests = this._getCount(results[0]);
|
||||||
|
statsRes['500s'] = this._getCount(results[1]);
|
||||||
|
return cb(null, statsRes);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = StatsClient;
|
|
@ -0,0 +1,106 @@
|
||||||
|
const errors = require('../errors');
|
||||||
|
|
||||||
|
const validServices = {
|
||||||
|
aws: ['s3', 'iam', 'sts', 'ring'],
|
||||||
|
scality: ['utapi', 'sso'],
|
||||||
|
};
|
||||||
|
|
||||||
|
class ARN {
|
||||||
|
/**
|
||||||
|
*
|
||||||
|
* Create an ARN object from its individual components
|
||||||
|
*
|
||||||
|
* @constructor
|
||||||
|
* @param {string} partition - ARN partition (e.g. 'aws')
|
||||||
|
* @param {string} service - service name in partition (e.g. 's3')
|
||||||
|
* @param {string} [region] - AWS region
|
||||||
|
* @param {string} [accountId] - AWS 12-digit account ID
|
||||||
|
* @param {string} resource - AWS resource path (e.g. 'foo/bar')
|
||||||
|
*/
|
||||||
|
constructor(partition, service, region, accountId, resource) {
|
||||||
|
this._partition = partition;
|
||||||
|
this._service = service;
|
||||||
|
this._region = region || null;
|
||||||
|
this._accountId = accountId || null;
|
||||||
|
this._resource = resource;
|
||||||
|
}
|
||||||
|
|
||||||
|
static createFromString(arnStr) {
|
||||||
|
const [arn, partition, service, region, accountId,
|
||||||
|
resourceType, resource] = arnStr.split(':');
|
||||||
|
|
||||||
|
if (arn !== 'arn') {
|
||||||
|
return { error: errors.InvalidArgument.customizeDescription(
|
||||||
|
'bad ARN: must start with "arn:"') };
|
||||||
|
}
|
||||||
|
if (!partition) {
|
||||||
|
return { error: errors.InvalidArgument.customizeDescription(
|
||||||
|
'bad ARN: must include a partition name, like "aws" in ' +
|
||||||
|
'"arn:aws:..."') };
|
||||||
|
}
|
||||||
|
if (!service) {
|
||||||
|
return { error: errors.InvalidArgument.customizeDescription(
|
||||||
|
'bad ARN: must include a service name, like "s3" in ' +
|
||||||
|
'"arn:aws:s3:..."') };
|
||||||
|
}
|
||||||
|
if (validServices[partition] === undefined) {
|
||||||
|
return { error: errors.InvalidArgument.customizeDescription(
|
||||||
|
`bad ARN: unknown partition "${partition}", should be a ` +
|
||||||
|
'valid partition name like "aws" in "arn:aws:..."') };
|
||||||
|
}
|
||||||
|
if (!validServices[partition].includes(service)) {
|
||||||
|
return { error: errors.InvalidArgument.customizeDescription(
|
||||||
|
`bad ARN: unsupported ${partition} service "${service}"`) };
|
||||||
|
}
|
||||||
|
if (accountId && !/^([0-9]{12}|[*])$/.test(accountId)) {
|
||||||
|
return { error: errors.InvalidArgument.customizeDescription(
|
||||||
|
`bad ARN: bad account ID "${accountId}": ` +
|
||||||
|
'must be a 12-digit number or "*"') };
|
||||||
|
}
|
||||||
|
const fullResource = (resource !== undefined ?
|
||||||
|
`${resourceType}:${resource}` : resourceType);
|
||||||
|
return new ARN(partition, service, region, accountId, fullResource);
|
||||||
|
}
|
||||||
|
|
||||||
|
getPartition() {
|
||||||
|
return this._partition;
|
||||||
|
}
|
||||||
|
getService() {
|
||||||
|
return this._service;
|
||||||
|
}
|
||||||
|
getRegion() {
|
||||||
|
return this._region;
|
||||||
|
}
|
||||||
|
getAccountId() {
|
||||||
|
return this._accountId;
|
||||||
|
}
|
||||||
|
getResource() {
|
||||||
|
return this._resource;
|
||||||
|
}
|
||||||
|
|
||||||
|
isIAMAccount() {
|
||||||
|
return this.getService() === 'iam'
|
||||||
|
&& this.getAccountId() !== null
|
||||||
|
&& this.getAccountId() !== '*'
|
||||||
|
&& this.getResource() === 'root';
|
||||||
|
}
|
||||||
|
isIAMUser() {
|
||||||
|
return this.getService() === 'iam'
|
||||||
|
&& this.getAccountId() !== null
|
||||||
|
&& this.getAccountId() !== '*'
|
||||||
|
&& this.getResource().startsWith('user/');
|
||||||
|
}
|
||||||
|
isIAMRole() {
|
||||||
|
return this.getService() === 'iam'
|
||||||
|
&& this.getAccountId() !== null
|
||||||
|
&& this.getResource().startsWith('role');
|
||||||
|
}
|
||||||
|
|
||||||
|
toString() {
|
||||||
|
return ['arn', this.getPartition(), this.getService(),
|
||||||
|
this.getRegion(), this.getAccountId(), this.getResource()]
|
||||||
|
.join(':');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = ARN;
|
|
@ -1,23 +1,66 @@
|
||||||
|
const constants = require('../constants');
|
||||||
|
const VersionIDUtils = require('../versioning/VersionID');
|
||||||
|
|
||||||
// Version 2 changes the format of the data location property
|
const ObjectMDLocation = require('./ObjectMDLocation');
|
||||||
// Version 3 adds the dataStoreName attribute
|
|
||||||
const modelVersion = 3;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Class to manage metadata object for regular s3 objects (instead of
|
* Class to manage metadata object for regular s3 objects (instead of
|
||||||
* mpuPart metadata for example)
|
* mpuPart metadata for example)
|
||||||
*/
|
*/
|
||||||
module.exports = class ObjectMD {
|
class ObjectMD {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @constructor
|
* Create a new instance of ObjectMD. Parameter <tt>objMd</tt> is
|
||||||
|
* reserved for internal use, users should call
|
||||||
|
* {@link ObjectMD.createFromBlob()} to load from a stored
|
||||||
|
* metadata blob and check the returned value for errors.
|
||||||
*
|
*
|
||||||
* @param {number} version - Version of the metadata model
|
* @constructor
|
||||||
|
* @param {ObjectMD|object} [objMd] - object metadata source,
|
||||||
|
* either an ObjectMD instance or a native JS object parsed from
|
||||||
|
* JSON
|
||||||
*/
|
*/
|
||||||
constructor(version) {
|
constructor(objMd = undefined) {
|
||||||
const now = new Date().toJSON();
|
this._initMd();
|
||||||
|
if (objMd !== undefined) {
|
||||||
|
if (objMd instanceof ObjectMD) {
|
||||||
|
this._updateFromObjectMD(objMd);
|
||||||
|
} else {
|
||||||
|
this._updateFromParsedJSON(objMd);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// set newly-created object md modified time to current time
|
||||||
|
this._data['last-modified'] = new Date().toJSON();
|
||||||
|
}
|
||||||
|
// set latest md model version now that we ensured
|
||||||
|
// backward-compat conversion
|
||||||
|
this._data['md-model-version'] = constants.mdModelVersion;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* create an ObjectMD instance from stored metadata
|
||||||
|
*
|
||||||
|
* @param {String|Buffer} storedBlob - serialized metadata blob
|
||||||
|
* @return {object} a result object containing either a 'result'
|
||||||
|
* property which value is a new ObjectMD instance on success, or
|
||||||
|
* an 'error' property on error
|
||||||
|
*/
|
||||||
|
static createFromBlob(storedBlob) {
|
||||||
|
try {
|
||||||
|
const objMd = JSON.parse(storedBlob);
|
||||||
|
return { result: new ObjectMD(objMd) };
|
||||||
|
} catch (err) {
|
||||||
|
return { error: err };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
getSerialized() {
|
||||||
|
return JSON.stringify(this.getValue());
|
||||||
|
}
|
||||||
|
|
||||||
|
_initMd() {
|
||||||
|
// initialize md with default values
|
||||||
this._data = {
|
this._data = {
|
||||||
'md-model-version': version || modelVersion,
|
|
||||||
'owner-display-name': '',
|
'owner-display-name': '',
|
||||||
'owner-id': '',
|
'owner-id': '',
|
||||||
'cache-control': '',
|
'cache-control': '',
|
||||||
|
@ -26,7 +69,6 @@ module.exports = class ObjectMD {
|
||||||
'expires': '',
|
'expires': '',
|
||||||
'content-length': 0,
|
'content-length': 0,
|
||||||
'content-type': '',
|
'content-type': '',
|
||||||
'last-modified': now,
|
|
||||||
'content-md5': '',
|
'content-md5': '',
|
||||||
// simple/no version. will expand once object versioning is
|
// simple/no version. will expand once object versioning is
|
||||||
// introduced
|
// introduced
|
||||||
|
@ -48,7 +90,7 @@ module.exports = class ObjectMD {
|
||||||
READ_ACP: [],
|
READ_ACP: [],
|
||||||
},
|
},
|
||||||
'key': '',
|
'key': '',
|
||||||
'location': [],
|
'location': null,
|
||||||
'isNull': '',
|
'isNull': '',
|
||||||
'nullVersionId': '',
|
'nullVersionId': '',
|
||||||
'isDeleteMarker': '',
|
'isDeleteMarker': '',
|
||||||
|
@ -60,18 +102,37 @@ module.exports = class ObjectMD {
|
||||||
destination: '',
|
destination: '',
|
||||||
storageClass: '',
|
storageClass: '',
|
||||||
role: '',
|
role: '',
|
||||||
|
storageType: '',
|
||||||
|
dataStoreVersionId: '',
|
||||||
},
|
},
|
||||||
'dataStoreName': '',
|
'dataStoreName': '',
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
_updateFromObjectMD(objMd) {
|
||||||
* Returns metadata model version
|
// We only duplicate selected attributes here, where setters
|
||||||
*
|
// allow to change inner values, and let the others as shallow
|
||||||
* @return {number} Metadata model version
|
// copies. Since performance is a concern, we want to avoid
|
||||||
*/
|
// the JSON.parse(JSON.stringify()) method.
|
||||||
getModelVersion() {
|
|
||||||
return this._data['md-model-version'];
|
Object.assign(this._data, objMd._data);
|
||||||
|
Object.assign(this._data.replicationInfo,
|
||||||
|
objMd._data.replicationInfo);
|
||||||
|
}
|
||||||
|
|
||||||
|
_updateFromParsedJSON(objMd) {
|
||||||
|
// objMd is a new JS object created for the purpose, it's safe
|
||||||
|
// to just assign its top-level properties.
|
||||||
|
|
||||||
|
Object.assign(this._data, objMd);
|
||||||
|
this._convertToLatestModel();
|
||||||
|
}
|
||||||
|
|
||||||
|
_convertToLatestModel() {
|
||||||
|
// handle backward-compat stuff
|
||||||
|
if (typeof(this._data.location) === 'string') {
|
||||||
|
this.setLocation([{ key: this._data.location }]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -462,21 +523,53 @@ module.exports = class ObjectMD {
|
||||||
/**
|
/**
|
||||||
* Set location
|
* Set location
|
||||||
*
|
*
|
||||||
* @param {string[]} location - location
|
* @param {object[]} location - array of data locations (see
|
||||||
|
* constructor of {@link ObjectMDLocation} for a description of
|
||||||
|
* fields for each array object)
|
||||||
* @return {ObjectMD} itself
|
* @return {ObjectMD} itself
|
||||||
*/
|
*/
|
||||||
setLocation(location) {
|
setLocation(location) {
|
||||||
this._data.location = location;
|
if (!Array.isArray(location) || location.length === 0) {
|
||||||
|
this._data.location = null;
|
||||||
|
} else {
|
||||||
|
this._data.location = location;
|
||||||
|
}
|
||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns location
|
* Returns location
|
||||||
*
|
*
|
||||||
* @return {string[]} location
|
* @return {object[]} location
|
||||||
*/
|
*/
|
||||||
getLocation() {
|
getLocation() {
|
||||||
return this._data.location;
|
const { location } = this._data;
|
||||||
|
return Array.isArray(location) ? location : [];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Object metadata may contain multiple elements for a single part if
|
||||||
|
// the part was originally copied from another MPU. Here we reduce the
|
||||||
|
// locations array to a single element for each part.
|
||||||
|
getReducedLocations() {
|
||||||
|
const locations = this.getLocation();
|
||||||
|
const reducedLocations = [];
|
||||||
|
let partTotal = 0;
|
||||||
|
for (let i = 0; i < locations.length; i++) {
|
||||||
|
const currPart = new ObjectMDLocation(locations[i]);
|
||||||
|
const currPartNum = currPart.getPartNumber();
|
||||||
|
let nextPartNum = undefined;
|
||||||
|
if (i < locations.length - 1) {
|
||||||
|
const nextPart = new ObjectMDLocation(locations[i + 1]);
|
||||||
|
nextPartNum = nextPart.getPartNumber();
|
||||||
|
}
|
||||||
|
partTotal += currPart.getPartSize();
|
||||||
|
if (currPartNum !== nextPartNum) {
|
||||||
|
currPart.setPartSize(partTotal);
|
||||||
|
reducedLocations.push(currPart.getValue());
|
||||||
|
partTotal = 0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return reducedLocations;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -559,6 +652,16 @@ module.exports = class ObjectMD {
|
||||||
return this._data.versionId;
|
return this._data.versionId;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get metadata versionId value in encoded form (the one visible
|
||||||
|
* to the S3 API user)
|
||||||
|
*
|
||||||
|
* @return {string} The encoded object versionId
|
||||||
|
*/
|
||||||
|
getEncodedVersionId() {
|
||||||
|
return VersionIDUtils.encode(this.getVersionId());
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Set tags
|
* Set tags
|
||||||
*
|
*
|
||||||
|
@ -586,14 +689,16 @@ module.exports = class ObjectMD {
|
||||||
* @return {ObjectMD} itself
|
* @return {ObjectMD} itself
|
||||||
*/
|
*/
|
||||||
setReplicationInfo(replicationInfo) {
|
setReplicationInfo(replicationInfo) {
|
||||||
const { status, content, destination, storageClass, role } =
|
const { status, content, destination, storageClass, role,
|
||||||
replicationInfo;
|
storageType, dataStoreVersionId } = replicationInfo;
|
||||||
this._data.replicationInfo = {
|
this._data.replicationInfo = {
|
||||||
status,
|
status,
|
||||||
content,
|
content,
|
||||||
destination,
|
destination,
|
||||||
storageClass: storageClass || '',
|
storageClass: storageClass || '',
|
||||||
role,
|
role,
|
||||||
|
storageType: storageType || '',
|
||||||
|
dataStoreVersionId: dataStoreVersionId || '',
|
||||||
};
|
};
|
||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
@ -607,6 +712,45 @@ module.exports = class ObjectMD {
|
||||||
return this._data.replicationInfo;
|
return this._data.replicationInfo;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
setReplicationStatus(status) {
|
||||||
|
this._data.replicationInfo.status = status;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
setReplicationDataStoreVersionId(versionId) {
|
||||||
|
this._data.replicationInfo.dataStoreVersionId = versionId;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
getReplicationDataStoreVersionId() {
|
||||||
|
return this._data.replicationInfo.dataStoreVersionId;
|
||||||
|
}
|
||||||
|
|
||||||
|
getReplicationStatus() {
|
||||||
|
return this._data.replicationInfo.status;
|
||||||
|
}
|
||||||
|
|
||||||
|
getReplicationContent() {
|
||||||
|
return this._data.replicationInfo.content;
|
||||||
|
}
|
||||||
|
|
||||||
|
getReplicationRoles() {
|
||||||
|
return this._data.replicationInfo.role;
|
||||||
|
}
|
||||||
|
|
||||||
|
getReplicationStorageType() {
|
||||||
|
return this._data.replicationInfo.storageType;
|
||||||
|
}
|
||||||
|
|
||||||
|
getReplicationStorageClass() {
|
||||||
|
return this._data.replicationInfo.storageClass;
|
||||||
|
}
|
||||||
|
|
||||||
|
getReplicationTargetBucket() {
|
||||||
|
const destBucketArn = this._data.replicationInfo.destination;
|
||||||
|
return destBucketArn.split(':').slice(-1)[0];
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Set dataStoreName
|
* Set dataStoreName
|
||||||
*
|
*
|
||||||
|
@ -627,6 +771,19 @@ module.exports = class ObjectMD {
|
||||||
return this._data.dataStoreName;
|
return this._data.dataStoreName;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get dataStoreVersionId
|
||||||
|
*
|
||||||
|
* @return {string} external backend version id for data
|
||||||
|
*/
|
||||||
|
getDataStoreVersionId() {
|
||||||
|
const location = this.getLocation();
|
||||||
|
if (!location[0]) {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
return location[0].dataStoreVersionId;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Set custom meta headers
|
* Set custom meta headers
|
||||||
*
|
*
|
||||||
|
@ -665,4 +822,6 @@ module.exports = class ObjectMD {
|
||||||
getValue() {
|
getValue() {
|
||||||
return this._data;
|
return this._data;
|
||||||
}
|
}
|
||||||
};
|
}
|
||||||
|
|
||||||
|
module.exports = ObjectMD;
|
||||||
|
|
|
@ -0,0 +1,72 @@
|
||||||
|
/**
|
||||||
|
* Helper class to ease access to a single data location in metadata
|
||||||
|
* 'location' array
|
||||||
|
*/
|
||||||
|
class ObjectMDLocation {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @constructor
|
||||||
|
* @param {object} locationObj - single data location info
|
||||||
|
* @param {string} locationObj.key - data backend key
|
||||||
|
* @param {number} locationObj.start - index of first data byte of
|
||||||
|
* this part in the full object
|
||||||
|
* @param {number} locationObj.size - byte length of data part
|
||||||
|
* @param {string} locationObj.dataStoreName - type of data store
|
||||||
|
* @param {string} locationObj.dataStoreETag - internal ETag of
|
||||||
|
* data part
|
||||||
|
*/
|
||||||
|
constructor(locationObj) {
|
||||||
|
this._data = {
|
||||||
|
key: locationObj.key,
|
||||||
|
start: locationObj.start,
|
||||||
|
size: locationObj.size,
|
||||||
|
dataStoreName: locationObj.dataStoreName,
|
||||||
|
dataStoreETag: locationObj.dataStoreETag,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
getKey() {
|
||||||
|
return this._data.key;
|
||||||
|
}
|
||||||
|
|
||||||
|
getDataStoreName() {
|
||||||
|
return this._data.dataStoreName;
|
||||||
|
}
|
||||||
|
|
||||||
|
setDataLocation(location) {
|
||||||
|
this._data.key = location.key;
|
||||||
|
this._data.dataStoreName = location.dataStoreName;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
getDataStoreETag() {
|
||||||
|
return this._data.dataStoreETag;
|
||||||
|
}
|
||||||
|
|
||||||
|
getPartNumber() {
|
||||||
|
return Number.parseInt(this._data.dataStoreETag.split(':')[0], 10);
|
||||||
|
}
|
||||||
|
|
||||||
|
getPartETag() {
|
||||||
|
return this._data.dataStoreETag.split(':')[1];
|
||||||
|
}
|
||||||
|
|
||||||
|
getPartStart() {
|
||||||
|
return this._data.start;
|
||||||
|
}
|
||||||
|
|
||||||
|
getPartSize() {
|
||||||
|
return this._data.size;
|
||||||
|
}
|
||||||
|
|
||||||
|
setPartSize(size) {
|
||||||
|
this._data.size = size;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
getValue() {
|
||||||
|
return this._data;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = ObjectMDLocation;
|
|
@ -58,6 +58,8 @@ class ReplicationConfiguration {
|
||||||
this._role = null;
|
this._role = null;
|
||||||
this._destination = null;
|
this._destination = null;
|
||||||
this._rules = null;
|
this._rules = null;
|
||||||
|
this._prevStorageClass = null;
|
||||||
|
this._isExternalLocation = null;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -148,11 +150,16 @@ class ReplicationConfiguration {
|
||||||
}
|
}
|
||||||
const role = parsedRole[0];
|
const role = parsedRole[0];
|
||||||
const rolesArr = role.split(',');
|
const rolesArr = role.split(',');
|
||||||
if (rolesArr.length !== 2) {
|
if (!this._isExternalLocation && rolesArr.length !== 2) {
|
||||||
return errors.InvalidArgument.customizeDescription(
|
return errors.InvalidArgument.customizeDescription(
|
||||||
'Invalid Role specified in replication configuration: ' +
|
'Invalid Role specified in replication configuration: ' +
|
||||||
'Role must be a comma-separated list of two IAM roles');
|
'Role must be a comma-separated list of two IAM roles');
|
||||||
}
|
}
|
||||||
|
if (this._isExternalLocation && rolesArr.length > 1) {
|
||||||
|
return errors.InvalidArgument.customizeDescription(
|
||||||
|
'Invalid Role specified in replication configuration: ' +
|
||||||
|
'Role may not contain a comma separator');
|
||||||
|
}
|
||||||
const invalidRole = rolesArr.find(r => !this._isValidRoleARN(r));
|
const invalidRole = rolesArr.find(r => !this._isValidRoleARN(r));
|
||||||
if (invalidRole !== undefined) {
|
if (invalidRole !== undefined) {
|
||||||
return errors.InvalidArgument.customizeDescription(
|
return errors.InvalidArgument.customizeDescription(
|
||||||
|
@ -268,22 +275,6 @@ class ReplicationConfiguration {
|
||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Check that the `StorageClass` is a valid class
|
|
||||||
* @param {string} storageClass - The storage class to validate
|
|
||||||
* @return {boolean} `true` if valid, otherwise `false`
|
|
||||||
*/
|
|
||||||
_isValidStorageClass(storageClass) {
|
|
||||||
if (!this._config) {
|
|
||||||
return validStorageClasses.includes(storageClass);
|
|
||||||
}
|
|
||||||
|
|
||||||
const replicationEndpoints = this._config.replicationEndpoints
|
|
||||||
.map(endpoint => endpoint.site);
|
|
||||||
return replicationEndpoints.includes(storageClass) ||
|
|
||||||
validStorageClasses.includes(storageClass);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Check that the `StorageClass` property is valid
|
* Check that the `StorageClass` property is valid
|
||||||
* @param {object} destination - The destination object from this._parsedXML
|
* @param {object} destination - The destination object from this._parsedXML
|
||||||
|
@ -292,9 +283,28 @@ class ReplicationConfiguration {
|
||||||
_parseStorageClass(destination) {
|
_parseStorageClass(destination) {
|
||||||
const storageClass = destination.StorageClass &&
|
const storageClass = destination.StorageClass &&
|
||||||
destination.StorageClass[0];
|
destination.StorageClass[0];
|
||||||
if (!this._isValidStorageClass(storageClass)) {
|
if (!this._config) {
|
||||||
|
return validStorageClasses.includes(storageClass);
|
||||||
|
}
|
||||||
|
const replicationEndpoints = this._config.replicationEndpoints
|
||||||
|
.map(endpoint => endpoint.site);
|
||||||
|
const locationConstraints =
|
||||||
|
Object.keys(this._config.locationConstraints);
|
||||||
|
if (locationConstraints.includes(storageClass)) {
|
||||||
|
if (this._prevStorageClass !== null &&
|
||||||
|
this._prevStorageClass !== storageClass) {
|
||||||
|
return errors.InvalidRequest.customizeDescription(
|
||||||
|
'The storage class must be same for all rules when ' +
|
||||||
|
'replicating objects to an external location');
|
||||||
|
}
|
||||||
|
this._isExternalLocation = true;
|
||||||
|
}
|
||||||
|
if (!replicationEndpoints.includes(storageClass) &&
|
||||||
|
!locationConstraints.includes(storageClass) &&
|
||||||
|
!validStorageClasses.includes(storageClass)) {
|
||||||
return errors.MalformedXML;
|
return errors.MalformedXML;
|
||||||
}
|
}
|
||||||
|
this._prevStorageClass = storageClass;
|
||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -359,11 +369,11 @@ class ReplicationConfiguration {
|
||||||
* @return {undefined}
|
* @return {undefined}
|
||||||
*/
|
*/
|
||||||
parseConfiguration() {
|
parseConfiguration() {
|
||||||
const err = this._parseRole() || this._parseRules();
|
const err = this._parseRules();
|
||||||
if (err) {
|
if (err) {
|
||||||
return err;
|
return err;
|
||||||
}
|
}
|
||||||
return undefined;
|
return this._parseRole();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
|
@ -34,8 +34,6 @@ class RoundRobin {
|
||||||
throw new Error(
|
throw new Error(
|
||||||
'at least one host must be provided for round robin');
|
'at least one host must be provided for round robin');
|
||||||
}
|
}
|
||||||
this.hostsList = hostsList.map(item => this._validateHostObj(item));
|
|
||||||
|
|
||||||
if (options && options.logger) {
|
if (options && options.logger) {
|
||||||
this.logger = options.logger;
|
this.logger = options.logger;
|
||||||
}
|
}
|
||||||
|
@ -44,6 +42,11 @@ class RoundRobin {
|
||||||
} else {
|
} else {
|
||||||
this.stickyCount = DEFAULT_STICKY_COUNT;
|
this.stickyCount = DEFAULT_STICKY_COUNT;
|
||||||
}
|
}
|
||||||
|
if (options && options.defaultPort) {
|
||||||
|
this.defaultPort = Number.parseInt(options.defaultPort, 10);
|
||||||
|
}
|
||||||
|
|
||||||
|
this.hostsList = hostsList.map(item => this._validateHostObj(item));
|
||||||
|
|
||||||
// TODO: add blacklisting capability
|
// TODO: add blacklisting capability
|
||||||
|
|
||||||
|
@ -90,7 +93,8 @@ class RoundRobin {
|
||||||
port: parsedPort,
|
port: parsedPort,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
return { host: hostItemObj.host };
|
return { host: hostItemObj.host,
|
||||||
|
port: this.defaultPort };
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
|
@ -66,7 +66,7 @@ function getByteRangeFromSpec(rangeSpec, objectSize) {
|
||||||
return {};
|
return {};
|
||||||
}
|
}
|
||||||
return { range: [Math.max(objectSize - rangeSpec.suffix, 0),
|
return { range: [Math.max(objectSize - rangeSpec.suffix, 0),
|
||||||
objectSize - 1] };
|
objectSize - 1] };
|
||||||
}
|
}
|
||||||
if (rangeSpec.start < objectSize) {
|
if (rangeSpec.start < objectSize) {
|
||||||
// test is false if end is undefined
|
// test is false if end is undefined
|
||||||
|
@ -105,5 +105,5 @@ function parseRange(rangeHeader, objectSize) {
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = { parseRangeSpec,
|
module.exports = { parseRangeSpec,
|
||||||
getByteRangeFromSpec,
|
getByteRangeFromSpec,
|
||||||
parseRange };
|
parseRange };
|
||||||
|
|
|
@ -31,10 +31,10 @@ function sendError(res, log, error, optMessage) {
|
||||||
message = error.description || '';
|
message = error.description || '';
|
||||||
}
|
}
|
||||||
log.debug('sending back error response', { httpCode: error.code,
|
log.debug('sending back error response', { httpCode: error.code,
|
||||||
errorType: error.message,
|
errorType: error.message,
|
||||||
error: message });
|
error: message });
|
||||||
res.end(`${JSON.stringify({ errorType: error.message,
|
res.end(`${JSON.stringify({ errorType: error.message,
|
||||||
errorMessage: message })}\n`);
|
errorMessage: message })}\n`);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -150,7 +150,7 @@ class RESTServer extends httpServer {
|
||||||
const reqUids = req.headers['x-scal-request-uids'];
|
const reqUids = req.headers['x-scal-request-uids'];
|
||||||
const log = this.createLogger(reqUids);
|
const log = this.createLogger(reqUids);
|
||||||
log.debug('request received', { method: req.method,
|
log.debug('request received', { method: req.method,
|
||||||
url: req.url });
|
url: req.url });
|
||||||
if (req.method in this.reqMethods) {
|
if (req.method in this.reqMethods) {
|
||||||
this.reqMethods[req.method](req, res, log);
|
this.reqMethods[req.method](req, res, log);
|
||||||
} else {
|
} else {
|
||||||
|
@ -176,7 +176,7 @@ class RESTServer extends httpServer {
|
||||||
throw errors.MissingContentLength;
|
throw errors.MissingContentLength;
|
||||||
}
|
}
|
||||||
size = Number.parseInt(contentLength, 10);
|
size = Number.parseInt(contentLength, 10);
|
||||||
if (isNaN(size)) {
|
if (Number.isNaN(size)) {
|
||||||
throw errors.InvalidInput.customizeDescription(
|
throw errors.InvalidInput.customizeDescription(
|
||||||
'bad Content-Length');
|
'bad Content-Length');
|
||||||
}
|
}
|
||||||
|
|
|
@ -55,7 +55,7 @@ class LevelDbClient extends rpc.BaseClient {
|
||||||
*/
|
*/
|
||||||
openSub(subName) {
|
openSub(subName) {
|
||||||
const subDbClient = new LevelDbClient({ url: this.url,
|
const subDbClient = new LevelDbClient({ url: this.url,
|
||||||
logger: this.logger });
|
logger: this.logger });
|
||||||
// make the same exposed RPC calls available from the sub-level object
|
// make the same exposed RPC calls available from the sub-level object
|
||||||
Object.assign(subDbClient, this);
|
Object.assign(subDbClient, this);
|
||||||
// listeners should not be duplicated on sublevel
|
// listeners should not be duplicated on sublevel
|
||||||
|
|
|
@ -293,7 +293,7 @@ class BaseService {
|
||||||
exposedAPI.push({ name: callName });
|
exposedAPI.push({ name: callName });
|
||||||
});
|
});
|
||||||
return { apiVersion: this.apiVersion,
|
return { apiVersion: this.apiVersion,
|
||||||
api: exposedAPI };
|
api: exposedAPI };
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -524,7 +524,7 @@ function RPCServer(params) {
|
||||||
function sendHTTPError(res, err) {
|
function sendHTTPError(res, err) {
|
||||||
res.writeHead(err.code || 500);
|
res.writeHead(err.code || 500);
|
||||||
return res.end(`${JSON.stringify({ error: err.message,
|
return res.end(`${JSON.stringify({ error: err.message,
|
||||||
message: err.description })}\n`);
|
message: err.description })}\n`);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -596,7 +596,7 @@ function objectStreamToJSON(rstream, wstream, cb) {
|
||||||
streamRPCJSONObj = function _streamRPCJSONObj(obj, wstream, cb) {
|
streamRPCJSONObj = function _streamRPCJSONObj(obj, wstream, cb) {
|
||||||
const cbOnce = jsutil.once(cb);
|
const cbOnce = jsutil.once(cb);
|
||||||
|
|
||||||
if (typeof(obj) === 'object') {
|
if (typeof obj === 'object') {
|
||||||
if (obj && obj.pipe !== undefined) {
|
if (obj && obj.pipe !== undefined) {
|
||||||
// stream object streams as JSON arrays
|
// stream object streams as JSON arrays
|
||||||
return objectStreamToJSON(obj, wstream, cbOnce);
|
return objectStreamToJSON(obj, wstream, cbOnce);
|
||||||
|
@ -734,7 +734,7 @@ function RESTServer(params) {
|
||||||
* @return {undefined}
|
* @return {undefined}
|
||||||
*/
|
*/
|
||||||
httpServer.registerServices = function registerServices(...serviceList) {
|
httpServer.registerServices = function registerServices(...serviceList) {
|
||||||
this.serviceList.push.apply(this.serviceList, serviceList);
|
this.serviceList.push(...serviceList);
|
||||||
};
|
};
|
||||||
|
|
||||||
return httpServer;
|
return httpServer;
|
||||||
|
|
|
@ -134,8 +134,7 @@ class SIOInputStream extends stream.Readable {
|
||||||
if (this._destroyed) {
|
if (this._destroyed) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
this._readState.pushBuffer.push.apply(this._readState.pushBuffer,
|
this._readState.pushBuffer.push(...data);
|
||||||
data);
|
|
||||||
if (this._readState.readable) {
|
if (this._readState.readable) {
|
||||||
this._pushData();
|
this._pushData();
|
||||||
}
|
}
|
||||||
|
@ -260,9 +259,9 @@ class SIOStreamSocket {
|
||||||
return arg;
|
return arg;
|
||||||
}
|
}
|
||||||
const log = this.logger;
|
const log = this.logger;
|
||||||
const isReadStream = (typeof(arg.pipe) === 'function'
|
const isReadStream = (typeof arg.pipe === 'function'
|
||||||
&& typeof (arg.read) === 'function');
|
&& typeof (arg.read) === 'function');
|
||||||
let isWriteStream = (typeof(arg.write) === 'function');
|
let isWriteStream = (typeof arg.write === 'function');
|
||||||
|
|
||||||
if (isReadStream || isWriteStream) {
|
if (isReadStream || isWriteStream) {
|
||||||
if (isReadStream && isWriteStream) {
|
if (isReadStream && isWriteStream) {
|
||||||
|
@ -303,7 +302,7 @@ class SIOStreamSocket {
|
||||||
}
|
}
|
||||||
return encodedStream;
|
return encodedStream;
|
||||||
}
|
}
|
||||||
if (typeof(arg) === 'object') {
|
if (typeof arg === 'object') {
|
||||||
let encodedObj;
|
let encodedObj;
|
||||||
if (Array.isArray(arg)) {
|
if (Array.isArray(arg)) {
|
||||||
encodedObj = [];
|
encodedObj = [];
|
||||||
|
@ -377,7 +376,7 @@ class SIOStreamSocket {
|
||||||
});
|
});
|
||||||
return stream;
|
return stream;
|
||||||
}
|
}
|
||||||
if (typeof(arg) === 'object') {
|
if (typeof arg === 'object') {
|
||||||
let decodedObj;
|
let decodedObj;
|
||||||
if (Array.isArray(arg)) {
|
if (Array.isArray(arg)) {
|
||||||
decodedObj = [];
|
decodedObj = [];
|
||||||
|
@ -412,7 +411,7 @@ class SIOStreamSocket {
|
||||||
_error(streamId, error) {
|
_error(streamId, error) {
|
||||||
this.logger.debug('emit \'stream-error\' event', { streamId, error });
|
this.logger.debug('emit \'stream-error\' event', { streamId, error });
|
||||||
this.socket.emit('stream-error', { streamId,
|
this.socket.emit('stream-error', { streamId,
|
||||||
error: flattenError(error) });
|
error: flattenError(error) });
|
||||||
}
|
}
|
||||||
|
|
||||||
_hangup(streamId) {
|
_hangup(streamId) {
|
||||||
|
|
|
@ -91,6 +91,10 @@ const _actionMapSSO = {
|
||||||
SsoAuthorize: 'sso:Authorize',
|
SsoAuthorize: 'sso:Authorize',
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const _actionMapSTS = {
|
||||||
|
assumeRole: 'sts:AssumeRole',
|
||||||
|
};
|
||||||
|
|
||||||
function _findAction(service, method) {
|
function _findAction(service, method) {
|
||||||
if (service === 's3') {
|
if (service === 's3') {
|
||||||
return _actionMap[method];
|
return _actionMap[method];
|
||||||
|
@ -108,6 +112,9 @@ function _findAction(service, method) {
|
||||||
// currently only method is ListMetrics
|
// currently only method is ListMetrics
|
||||||
return `utapi:${method}`;
|
return `utapi:${method}`;
|
||||||
}
|
}
|
||||||
|
if (service === 'sts') {
|
||||||
|
return _actionMapSTS[method];
|
||||||
|
}
|
||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -123,13 +130,17 @@ function _buildArn(service, generalResource, specificResource, requesterInfo) {
|
||||||
}
|
}
|
||||||
return 'arn:aws:s3:::';
|
return 'arn:aws:s3:::';
|
||||||
}
|
}
|
||||||
if (service === 'iam') {
|
if (service === 'iam' || service === 'sts') {
|
||||||
// arn:aws:iam::<account-id>:<resource-type><resource>
|
// arn:aws:iam::<account-id>:<resource-type><resource>
|
||||||
|
let accountId = requesterInfo.accountid;
|
||||||
|
if (service === 'sts') {
|
||||||
|
accountId = requesterInfo.targetAccountId;
|
||||||
|
}
|
||||||
if (specificResource) {
|
if (specificResource) {
|
||||||
return `arn:aws:iam::${requesterInfo.accountid}:` +
|
return `arn:aws:iam::${accountId}:` +
|
||||||
`${generalResource}${specificResource}`;
|
`${generalResource}${specificResource}`;
|
||||||
}
|
}
|
||||||
return `arn:aws:iam::${requesterInfo.accountid}:${generalResource}`;
|
return `arn:aws:iam::${accountId}:${generalResource}`;
|
||||||
}
|
}
|
||||||
if (service === 'ring') {
|
if (service === 'ring') {
|
||||||
// arn:aws:iam::<account-id>:<resource-type><resource>
|
// arn:aws:iam::<account-id>:<resource-type><resource>
|
||||||
|
@ -177,6 +188,7 @@ function _buildArn(service, generalResource, specificResource, requesterInfo) {
|
||||||
* @param {string} authType - type of authentication used
|
* @param {string} authType - type of authentication used
|
||||||
* @param {number} signatureAge - age of signature in milliseconds
|
* @param {number} signatureAge - age of signature in milliseconds
|
||||||
* @param {string} securityToken - auth security token (temporary credentials)
|
* @param {string} securityToken - auth security token (temporary credentials)
|
||||||
|
* @param {string} policyArn - policy arn
|
||||||
* @return {RequestContext} a RequestContext instance
|
* @return {RequestContext} a RequestContext instance
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
@ -184,7 +196,8 @@ class RequestContext {
|
||||||
constructor(headers, query, generalResource, specificResource,
|
constructor(headers, query, generalResource, specificResource,
|
||||||
requesterIp, sslEnabled, apiMethod,
|
requesterIp, sslEnabled, apiMethod,
|
||||||
awsService, locationConstraint, requesterInfo,
|
awsService, locationConstraint, requesterInfo,
|
||||||
signatureVersion, authType, signatureAge, securityToken) {
|
signatureVersion, authType, signatureAge, securityToken, policyArn,
|
||||||
|
action) {
|
||||||
this._headers = headers;
|
this._headers = headers;
|
||||||
this._query = query;
|
this._query = query;
|
||||||
this._requesterIp = requesterIp;
|
this._requesterIp = requesterIp;
|
||||||
|
@ -210,7 +223,8 @@ class RequestContext {
|
||||||
this._authType = authType;
|
this._authType = authType;
|
||||||
this._signatureAge = signatureAge;
|
this._signatureAge = signatureAge;
|
||||||
this._securityToken = securityToken;
|
this._securityToken = securityToken;
|
||||||
|
this._policyArn = policyArn;
|
||||||
|
this._action = action;
|
||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -237,6 +251,8 @@ class RequestContext {
|
||||||
locationConstraint: this._locationConstraint,
|
locationConstraint: this._locationConstraint,
|
||||||
tokenIssueTime: this._tokenIssueTime,
|
tokenIssueTime: this._tokenIssueTime,
|
||||||
securityToken: this._securityToken,
|
securityToken: this._securityToken,
|
||||||
|
policyArn: this._policyArn,
|
||||||
|
action: this._action,
|
||||||
};
|
};
|
||||||
return JSON.stringify(requestInfo);
|
return JSON.stringify(requestInfo);
|
||||||
}
|
}
|
||||||
|
@ -257,7 +273,8 @@ class RequestContext {
|
||||||
obj.specificResource, obj.requesterIp, obj.sslEnabled,
|
obj.specificResource, obj.requesterIp, obj.sslEnabled,
|
||||||
obj.apiMethod, obj.awsService, obj.locationConstraint,
|
obj.apiMethod, obj.awsService, obj.locationConstraint,
|
||||||
obj.requesterInfo, obj.signatureVersion,
|
obj.requesterInfo, obj.signatureVersion,
|
||||||
obj.authType, obj.signatureAge, obj.securityToken);
|
obj.authType, obj.signatureAge, obj.securityToken, obj.policyArn,
|
||||||
|
obj.action);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -265,6 +282,9 @@ class RequestContext {
|
||||||
* @return {string} action
|
* @return {string} action
|
||||||
*/
|
*/
|
||||||
getAction() {
|
getAction() {
|
||||||
|
if (this._action) {
|
||||||
|
return this._action;
|
||||||
|
}
|
||||||
if (this._foundAction) {
|
if (this._foundAction) {
|
||||||
return this._foundAction;
|
return this._foundAction;
|
||||||
}
|
}
|
||||||
|
@ -355,6 +375,26 @@ class RequestContext {
|
||||||
return parseIp(this._requesterIp);
|
return parseIp(this._requesterIp);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
getRequesterAccountId() {
|
||||||
|
return this._requesterInfo.accountid;
|
||||||
|
}
|
||||||
|
|
||||||
|
getRequesterEndArn() {
|
||||||
|
return this._requesterInfo.arn;
|
||||||
|
}
|
||||||
|
|
||||||
|
getRequesterExternalId() {
|
||||||
|
return this._requesterInfo.externalId;
|
||||||
|
}
|
||||||
|
|
||||||
|
getRequesterPrincipalArn() {
|
||||||
|
return this._requesterInfo.parentArn || this._requesterInfo.arn;
|
||||||
|
}
|
||||||
|
|
||||||
|
getRequesterType() {
|
||||||
|
return this._requesterInfo.principalType;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Set sslEnabled
|
* Set sslEnabled
|
||||||
* @param {boolean} sslEnabled - true if https used
|
* @param {boolean} sslEnabled - true if https used
|
||||||
|
@ -548,6 +588,26 @@ class RequestContext {
|
||||||
this._securityToken = token;
|
this._securityToken = token;
|
||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the policy arn
|
||||||
|
*
|
||||||
|
* @return {string} policyArn - Policy arn
|
||||||
|
*/
|
||||||
|
getPolicyArn() {
|
||||||
|
return this._policyArn;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the policy arn
|
||||||
|
*
|
||||||
|
* @param {string} policyArn - Policy arn
|
||||||
|
* @return {RequestContext} itself
|
||||||
|
*/
|
||||||
|
setPolicyArn(policyArn) {
|
||||||
|
this._policyArn = policyArn;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = RequestContext;
|
module.exports = RequestContext;
|
||||||
|
|
|
@ -38,7 +38,7 @@ function isResourceApplicable(requestContext, statementResource, log) {
|
||||||
// Pull just the relative id because there is no restriction that it
|
// Pull just the relative id because there is no restriction that it
|
||||||
// does not contain ":"
|
// does not contain ":"
|
||||||
const requestRelativeId = requestResourceArr.slice(5).join(':');
|
const requestRelativeId = requestResourceArr.slice(5).join(':');
|
||||||
for (let i = 0; i < statementResource.length; i ++) {
|
for (let i = 0; i < statementResource.length; i++) {
|
||||||
// Handle variables (must handle BEFORE wildcards)
|
// Handle variables (must handle BEFORE wildcards)
|
||||||
const policyResource =
|
const policyResource =
|
||||||
substituteVariables(statementResource[i], requestContext);
|
substituteVariables(statementResource[i], requestContext);
|
||||||
|
@ -73,7 +73,7 @@ function isActionApplicable(requestAction, statementAction, log) {
|
||||||
statementAction = [statementAction];
|
statementAction = [statementAction];
|
||||||
}
|
}
|
||||||
const length = statementAction.length;
|
const length = statementAction.length;
|
||||||
for (let i = 0; i < length; i ++) {
|
for (let i = 0; i < length; i++) {
|
||||||
// No variables in actions so no need to handle
|
// No variables in actions so no need to handle
|
||||||
const regExStrOfStatementAction =
|
const regExStrOfStatementAction =
|
||||||
handleWildcards(statementAction[i]);
|
handleWildcards(statementAction[i]);
|
||||||
|
@ -98,12 +98,12 @@ function isActionApplicable(requestAction, statementAction, log) {
|
||||||
* @param {Object} log - logger
|
* @param {Object} log - logger
|
||||||
* @return {boolean} true if meet conditions, false if not
|
* @return {boolean} true if meet conditions, false if not
|
||||||
*/
|
*/
|
||||||
function meetConditions(requestContext, statementCondition, log) {
|
evaluators.meetConditions = (requestContext, statementCondition, log) => {
|
||||||
// The Condition portion of a policy is an object with different
|
// The Condition portion of a policy is an object with different
|
||||||
// operators as keys
|
// operators as keys
|
||||||
const operators = Object.keys(statementCondition);
|
const operators = Object.keys(statementCondition);
|
||||||
const length = operators.length;
|
const length = operators.length;
|
||||||
for (let i = 0; i < length; i ++) {
|
for (let i = 0; i < length; i++) {
|
||||||
const operator = operators[i];
|
const operator = operators[i];
|
||||||
const hasIfExistsCondition = operator.endsWith('IfExists');
|
const hasIfExistsCondition = operator.endsWith('IfExists');
|
||||||
// If has "IfExists" added to operator name, find operator name
|
// If has "IfExists" added to operator name, find operator name
|
||||||
|
@ -119,8 +119,7 @@ function meetConditions(requestContext, statementCondition, log) {
|
||||||
const conditionsWithSameOperator = statementCondition[operator];
|
const conditionsWithSameOperator = statementCondition[operator];
|
||||||
const conditionKeys = Object.keys(conditionsWithSameOperator);
|
const conditionKeys = Object.keys(conditionsWithSameOperator);
|
||||||
const conditionKeysLength = conditionKeys.length;
|
const conditionKeysLength = conditionKeys.length;
|
||||||
for (let j = 0; j < conditionKeysLength;
|
for (let j = 0; j < conditionKeysLength; j++) {
|
||||||
j ++) {
|
|
||||||
const key = conditionKeys[j];
|
const key = conditionKeys[j];
|
||||||
let value = conditionsWithSameOperator[key];
|
let value = conditionsWithSameOperator[key];
|
||||||
if (!Array.isArray(value)) {
|
if (!Array.isArray(value)) {
|
||||||
|
@ -165,13 +164,13 @@ function meetConditions(requestContext, statementCondition, log) {
|
||||||
// are the only operators where wildcards are allowed
|
// are the only operators where wildcards are allowed
|
||||||
if (!operatorFunction(keyBasedOnRequestContext, value)) {
|
if (!operatorFunction(keyBasedOnRequestContext, value)) {
|
||||||
log.trace('did not satisfy condition', { operator: bareOperator,
|
log.trace('did not satisfy condition', { operator: bareOperator,
|
||||||
keyBasedOnRequestContext, policyValue: value });
|
keyBasedOnRequestContext, policyValue: value });
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return true;
|
return true;
|
||||||
}
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Evaluate whether a request is permitted under a policy.
|
* Evaluate whether a request is permitted under a policy.
|
||||||
|
@ -222,7 +221,8 @@ evaluators.evaluatePolicy = (requestContext, policy, log) => {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
// If do not meet conditions move on to next statement
|
// If do not meet conditions move on to next statement
|
||||||
if (currentStatement.Condition && !meetConditions(requestContext,
|
if (currentStatement.Condition &&
|
||||||
|
!evaluators.meetConditions(requestContext,
|
||||||
currentStatement.Condition, log)) {
|
currentStatement.Condition, log)) {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,176 @@
|
||||||
|
const { meetConditions } = require('./evaluator');
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Class with methods to manage the policy 'principal' validation
|
||||||
|
*/
|
||||||
|
class Principal {
|
||||||
|
/**
|
||||||
|
* Function to evaluate conditions if needed
|
||||||
|
*
|
||||||
|
* @param {object} params - Evaluation parameters
|
||||||
|
* @param {object} statement - Statement policy field
|
||||||
|
* @return {boolean} True if meet conditions
|
||||||
|
*/
|
||||||
|
static _evaluateCondition(params, statement) {
|
||||||
|
if (statement.Condition) {
|
||||||
|
return meetConditions(params.rc, statement.Condition, params.log);
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Checks principal field against valid principals array
|
||||||
|
*
|
||||||
|
* @param {object} params - Evaluation parameters
|
||||||
|
* @param {object} statement - Statement policy field
|
||||||
|
* @param {object} valids - Valid principal fields
|
||||||
|
* @return {string} result of principal evaluation, either 'Neutral',
|
||||||
|
* 'Allow' or 'Deny'
|
||||||
|
*/
|
||||||
|
static _evaluatePrincipalField(params, statement, valids) {
|
||||||
|
const reverse = !!statement.NotPrincipal;
|
||||||
|
const principal = statement.Principal || statement.NotPrincipal;
|
||||||
|
if (typeof principal === 'string' && principal === '*') {
|
||||||
|
if (reverse) {
|
||||||
|
// In case of anonymous NotPrincipal, this will neutral everyone
|
||||||
|
return 'Neutral';
|
||||||
|
}
|
||||||
|
if (!Principal._evaluateCondition(params, statement)) {
|
||||||
|
return 'Neutral';
|
||||||
|
}
|
||||||
|
return statement.Effect;
|
||||||
|
} else if (typeof principal === 'string') {
|
||||||
|
return 'Deny';
|
||||||
|
}
|
||||||
|
let ref = [];
|
||||||
|
let toCheck = [];
|
||||||
|
if (valids.Federated && principal.Federated) {
|
||||||
|
ref = valids.Federated;
|
||||||
|
toCheck = principal.Federated;
|
||||||
|
} else if (valids.AWS && principal.AWS) {
|
||||||
|
ref = valids.AWS;
|
||||||
|
toCheck = principal.AWS;
|
||||||
|
} else if (valids.Service && principal.Service) {
|
||||||
|
ref = valids.Service;
|
||||||
|
toCheck = principal.Service;
|
||||||
|
} else {
|
||||||
|
if (reverse) {
|
||||||
|
return statement.Effect;
|
||||||
|
}
|
||||||
|
return 'Neutral';
|
||||||
|
}
|
||||||
|
toCheck = Array.isArray(toCheck) ? toCheck : [toCheck];
|
||||||
|
ref = Array.isArray(ref) ? ref : [ref];
|
||||||
|
if (toCheck.indexOf('*') !== -1) {
|
||||||
|
if (reverse) {
|
||||||
|
return 'Neutral';
|
||||||
|
}
|
||||||
|
if (!Principal._evaluateCondition(params, statement)) {
|
||||||
|
return 'Neutral';
|
||||||
|
}
|
||||||
|
return statement.Effect;
|
||||||
|
}
|
||||||
|
const len = ref.length;
|
||||||
|
for (let i = 0; i < len; ++i) {
|
||||||
|
if (toCheck.indexOf(ref[i]) !== -1) {
|
||||||
|
if (reverse) {
|
||||||
|
return 'Neutral';
|
||||||
|
}
|
||||||
|
if (!Principal._evaluateCondition(params, statement)) {
|
||||||
|
return 'Neutral';
|
||||||
|
}
|
||||||
|
return statement.Effect;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (reverse) {
|
||||||
|
return statement.Effect;
|
||||||
|
}
|
||||||
|
return 'Neutral';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Function to evaluate principal of statements against a valid principal
|
||||||
|
* array
|
||||||
|
*
|
||||||
|
* @param {object} params - Evaluation parameters
|
||||||
|
* @param {object} valids - Valid principal fields
|
||||||
|
* @return {string} result of principal evaluation, either 'Allow' or 'Deny'
|
||||||
|
*/
|
||||||
|
static _evaluatePrincipal(params, valids) {
|
||||||
|
const doc = params.trustedPolicy;
|
||||||
|
let statements = doc.Statement;
|
||||||
|
if (!Array.isArray(statements)) {
|
||||||
|
statements = [statements];
|
||||||
|
}
|
||||||
|
const len = statements.length;
|
||||||
|
let authorized = 'Deny';
|
||||||
|
for (let i = 0; i < len; ++i) {
|
||||||
|
const statement = statements[i];
|
||||||
|
const result = Principal._evaluatePrincipalField(params,
|
||||||
|
statement, valids);
|
||||||
|
if (result === 'Deny') {
|
||||||
|
return 'Deny';
|
||||||
|
} else if (result === 'Allow') {
|
||||||
|
authorized = 'Allow';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return authorized;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Function to evaluate principal for a policy
|
||||||
|
*
|
||||||
|
* @param {object} params - Evaluation parameters
|
||||||
|
* @return {object} {
|
||||||
|
* result: 'Allow' or 'Deny',
|
||||||
|
* checkAction: true or false,
|
||||||
|
* }
|
||||||
|
*/
|
||||||
|
static evaluatePrincipal(params) {
|
||||||
|
let valids = null;
|
||||||
|
let checkAction = false;
|
||||||
|
const account = params.rc.getRequesterAccountId();
|
||||||
|
const targetAccount = params.targetAccountId;
|
||||||
|
const accountArn = `arn:aws:iam::${account}:root`;
|
||||||
|
const requesterArn = params.rc.getRequesterPrincipalArn();
|
||||||
|
const requesterEndArn = params.rc.getRequesterEndArn();
|
||||||
|
const requesterType = params.rc.getRequesterType();
|
||||||
|
if (account !== targetAccount) {
|
||||||
|
valids = {
|
||||||
|
AWS: [
|
||||||
|
account,
|
||||||
|
accountArn,
|
||||||
|
],
|
||||||
|
};
|
||||||
|
checkAction = true;
|
||||||
|
} else {
|
||||||
|
if (requesterType === 'User' || requesterType === 'AssumedRole' ||
|
||||||
|
requesterType === 'Federated') {
|
||||||
|
valids = {
|
||||||
|
AWS: [
|
||||||
|
account,
|
||||||
|
accountArn,
|
||||||
|
],
|
||||||
|
};
|
||||||
|
if (requesterType === 'User' ||
|
||||||
|
requesterType === 'AssumedRole') {
|
||||||
|
valids.AWS.push(requesterArn);
|
||||||
|
if (requesterEndArn !== requesterArn) {
|
||||||
|
valids.AWS.push(requesterEndArn);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
valids.Federated = [requesterArn];
|
||||||
|
}
|
||||||
|
} else if (requesterType === 'Service') {
|
||||||
|
valids = { Service: requesterArn };
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const result = Principal._evaluatePrincipal(params, valids);
|
||||||
|
return {
|
||||||
|
result,
|
||||||
|
checkAction,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = Principal;
|
|
@ -14,8 +14,7 @@ const handleWildcardInResource =
|
||||||
*/
|
*/
|
||||||
function checkArnMatch(policyArn, requestRelativeId, requestArnArr,
|
function checkArnMatch(policyArn, requestRelativeId, requestArnArr,
|
||||||
caseSensitive) {
|
caseSensitive) {
|
||||||
let regExofArn = handleWildcardInResource(policyArn);
|
const regExofArn = handleWildcardInResource(policyArn);
|
||||||
regExofArn = caseSensitive ? regExofArn : regExofArn.toLowerCase();
|
|
||||||
// The relativeId is the last part of the ARN (for instance, a bucket and
|
// The relativeId is the last part of the ARN (for instance, a bucket and
|
||||||
// object name in S3)
|
// object name in S3)
|
||||||
// Join on ":" in case there were ":" in the relativeID at the end
|
// Join on ":" in case there were ":" in the relativeID at the end
|
||||||
|
@ -31,7 +30,7 @@ function checkArnMatch(policyArn, requestRelativeId, requestArnArr,
|
||||||
}
|
}
|
||||||
// Check the other parts of the ARN to make sure they match. If not,
|
// Check the other parts of the ARN to make sure they match. If not,
|
||||||
// return false.
|
// return false.
|
||||||
for (let j = 0; j < 5; j ++) {
|
for (let j = 0; j < 5; j++) {
|
||||||
const segmentRegEx = new RegExp(regExofArn[j]);
|
const segmentRegEx = new RegExp(regExofArn[j]);
|
||||||
const requestSegment = caseSensitive ? requestArnArr[j] :
|
const requestSegment = caseSensitive ? requestArnArr[j] :
|
||||||
requestArnArr[j].toLowerCase();
|
requestArnArr[j].toLowerCase();
|
||||||
|
|
|
@ -144,6 +144,8 @@ conditions.findConditionKey = (key, requestContext) => {
|
||||||
// header
|
// header
|
||||||
map.set('s3:ObjLocationConstraint',
|
map.set('s3:ObjLocationConstraint',
|
||||||
headers['x-amz-meta-scal-location-constraint']);
|
headers['x-amz-meta-scal-location-constraint']);
|
||||||
|
map.set('sts:ExternalId', requestContext.getRequesterExternalId());
|
||||||
|
map.set('iam:PolicyArn', requestContext.getPolicyArn());
|
||||||
return map.get(key);
|
return map.get(key);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,83 @@
|
||||||
|
const EventEmitter = require('events');
|
||||||
|
|
||||||
|
/**
 * Collects the outcomes of streaming subparts.
 * Emits "done" when streaming is complete and Azure has returned a result
 * for every subpart put; emits "error" when Azure reports a failure for a
 * subpart while streaming is still in progress.
 * @class ResultsCollector
 */
class ResultsCollector extends EventEmitter {
    /**
     * @constructor
     */
    constructor() {
        super();
        this._results = [];
        this._pendingOps = 0;
        this._sourceDrained = false;
    }

    /**
     * Register the result of putting one subpart and emit "done" or
     * "error" when appropriate
     * @param {(Error|undefined)} err - error returned from Azure after
     * putting a subpart
     * @param {number} subPartIndex - the index of the subpart
     * @emits ResultCollector#done
     * @emits ResultCollector#error
     * @return {undefined}
     */
    pushResult(err, subPartIndex) {
        this._results.push({ error: err, subPartIndex });
        this._pendingOps -= 1;
        if (this._resultsComplete()) {
            this.emit('done', err, this._results);
        } else if (err) {
            this.emit('error', err, subPartIndex);
        }
    }

    /**
     * Register that another subpart put has been started
     * @return {undefined}
     */
    pushOp() {
        this._pendingOps += 1;
    }

    /**
     * Signal that streaming has finished, allowing the "done" event to be
     * emitted once the last pending result comes back
     * @return {undefined}
     */
    enableComplete() {
        this._sourceDrained = true;
    }

    // True once streaming is over and no subpart put is still in flight.
    _resultsComplete() {
        return (this._pendingOps === 0 && this._sourceDrained);
    }
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* "done" event
|
||||||
|
* @event ResultCollector#done
|
||||||
|
* @type {(Error|undefined)} err - error returned by Azure putting last subpart
|
||||||
|
* @type {object[]} results - result for putting each of the subparts
|
||||||
|
* @property {Error} [results[].error] - error returned by Azure putting subpart
|
||||||
|
* @property {number} results[].subPartIndex - index of the subpart
|
||||||
|
*/
|
||||||
|
/**
|
||||||
|
* "error" event
|
||||||
|
* @event ResultCollector#error
|
||||||
|
* @type {(Error|undefined)} error - error returned by Azure last subpart
|
||||||
|
* @type {number} subPartIndex - index of the subpart
|
||||||
|
*/
|
||||||
|
|
||||||
|
module.exports = ResultsCollector;
|
|
@ -0,0 +1,126 @@
|
||||||
|
const stream = require('stream');
|
||||||
|
|
||||||
|
/**
 * Carves a source stream into a sequence of subpart streams.
 * @class SubStreamInterface
 */
class SubStreamInterface {
    /**
     * @constructor
     * @param {stream.Readable} sourceStream - stream to read for data
     */
    constructor(sourceStream) {
        this._source = sourceStream;
        this._totalBytes = 0;
        this._partBytes = 0;
        this._partIndex = 0;
        this._activePart = new stream.PassThrough();
    }

    /**
     * Pause the data flow from the source stream
     * @return {undefined}
     */
    pauseStreaming() {
        this._source.pause();
    }

    /**
     * Resume the data flow from the source stream
     * @return {undefined}
     */
    resumeStreaming() {
        this._source.resume();
    }

    /**
     * Signal end of data on the last subpart stream; to be called when the
     * source stream has ended
     * @return {undefined}
     */
    endStreaming() {
        this._totalBytes += this._partBytes;
        this._activePart.end();
    }

    /**
     * Destroy the underlying streams; to be called when streaming must be
     * stopped externally
     * @param {stream.Readable} [piper] - a stream that is piping data into
     * the source stream
     * @return {undefined}
     */
    stopStreaming(piper) {
        if (piper) {
            piper.unpipe();
            piper.destroy();
        }
        this._source.destroy();
        this._activePart.destroy();
    }

    /**
     * Length in bytes streamed so far for the current subpart
     * @return {number} byte count of the current subpart
     */
    getLengthCounter() {
        return this._partBytes;
    }

    /**
     * Total number of bytes streamed across all subparts
     * @return {number} total byte count
     */
    getTotalBytesStreamed() {
        return this._totalBytes;
    }

    /**
     * Subpart stream currently being written to from the source stream
     * @return {stream.PassThrough} the current subpart stream
     */
    getCurrentStream() {
        return this._activePart;
    }

    /**
     * Signal end of data for the current subpart stream, open a fresh one
     * and resume streaming into it
     * @return {object} object with the new current stream (`nextStream`)
     * and the index of the new subpart (`subPartIndex`)
     */
    transitionToNextStream() {
        this.pauseStreaming();
        this._activePart.end();
        this._totalBytes += this._partBytes;
        this._partBytes = 0;
        this._partIndex += 1;
        this._activePart = new stream.PassThrough();
        this.resumeStreaming();
        return {
            nextStream: this._activePart,
            subPartIndex: this._partIndex,
        };
    }

    /**
     * Write a chunk to the current subpart stream, pausing the source
     * until the subpart drains when backpressure builds up
     * @param {Buffer} chunk - a chunk of data
     * @return {undefined}
     */
    write(chunk) {
        const ready = this._activePart.write(chunk);
        if (!ready) {
            this.pauseStreaming();
            this._activePart.once('drain', () => this.resumeStreaming());
        }
        this._partBytes += chunk.length;
    }
}
|
||||||
|
|
||||||
|
module.exports = SubStreamInterface;
|
|
@ -0,0 +1,224 @@
|
||||||
|
const crypto = require('crypto');
|
||||||
|
const stream = require('stream');
|
||||||
|
|
||||||
|
const ResultsCollector = require('./ResultsCollector');
|
||||||
|
const SubStreamInterface = require('./SubStreamInterface');
|
||||||
|
const objectUtils = require('../objectUtils');
|
||||||
|
const MD5Sum = require('../MD5Sum');
|
||||||
|
const errors = require('../../errors');
|
||||||
|
|
||||||
|
const azureMpuUtils = {};

// Delimiter between arguments encoded inside MPU keys and block ids.
azureMpuUtils.splitter = '|';
azureMpuUtils.overviewMpuKey = 'azure_mpu';
// Upper bound on the size of a single streamed subpart (100 MiB).
azureMpuUtils.maxSubPartSize = 104857600;
// MD5 digest of the empty string, used as the ETag of zero-byte content.
azureMpuUtils.zeroByteETag = crypto.createHash('md5').update('').digest('hex');


azureMpuUtils.padString = (str, category) => {
    // Pre-generated pad strings are a bit cheaper than building the
    // padding with string.repeat() on every call.
    const padSpec = {
        partNumber: {
            padString: '00000',
            direction: 'left',
        },
        subPart: {
            padString: '00',
            direction: 'left',
        },
        part: {
            padString:
            '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%',
            direction: 'right',
        },
    };
    const { direction, padString } = padSpec[category];
    if (direction === 'left') {
        // Keep the rightmost characters: left-pads values with zeroes.
        return `${padString}${str}`.substr(-padString.length);
    }
    // Keep the leftmost characters: right-fills up to the pad's length.
    return `${str}${padString}`.substr(0, padString.length);
};

// NOTE: If we want to extract the object name from these keys, we will need
// to use a similar method to _getKeyAndUploadIdFromMpuKey since the object
// name may have instances of the splitter used to delimit arguments
azureMpuUtils.getMpuSummaryKey = (objectName, uploadId) =>
    `${objectName}${azureMpuUtils.splitter}${uploadId}`;

azureMpuUtils.getBlockId = (uploadId, partNumber, subPartIndex) => {
    const sp = azureMpuUtils.splitter;
    const paddedPart = azureMpuUtils.padString(partNumber, 'partNumber');
    const paddedSub = azureMpuUtils.padString(subPartIndex, 'subPart');
    const blockId =
        `${uploadId}${sp}partNumber${paddedPart}${sp}subPart${paddedSub}${sp}`;
    // Pad to a fixed width so every block id has the same length.
    return azureMpuUtils.padString(blockId, 'part');
};

azureMpuUtils.getSummaryPartId = (partNumber, eTag, size) => {
    const sp = azureMpuUtils.splitter;
    const paddedPart = azureMpuUtils.padString(partNumber, 'partNumber');
    const summaryKey =
        `${paddedPart}${sp}${Date.now()}${sp}${eTag}${sp}${size}${sp}`;
    return azureMpuUtils.padString(summaryKey, 'part');
};

azureMpuUtils.getSubPartInfo = dataContentLength => {
    const fullSubParts =
        Math.floor(dataContentLength / azureMpuUtils.maxSubPartSize);
    const remainder = dataContentLength % azureMpuUtils.maxSubPartSize;
    const numberSubParts = remainder ? fullSubParts + 1 : fullSubParts;
    return {
        lastPartIndex: numberSubParts - 1,
        // A zero remainder means the last subpart is a full-size one.
        lastPartSize: remainder || azureMpuUtils.maxSubPartSize,
    };
};

azureMpuUtils.getSubPartSize = (subPartInfo, subPartIndex) => {
    // Every subpart but the last one is full-size.
    if (subPartIndex === subPartInfo.lastPartIndex) {
        return subPartInfo.lastPartSize;
    }
    return azureMpuUtils.maxSubPartSize;
};

azureMpuUtils.getSubPartIds = (part, uploadId) =>
    [...Array(part.numberSubParts).keys()].map(subPartIndex =>
        azureMpuUtils.getBlockId(uploadId, part.partNumber, subPartIndex));
|
||||||
|
|
||||||
|
azureMpuUtils.putSinglePart = (errorWrapperFn, request, params, dataStoreName,
log, cb) => {
    const { bucketName, partNumber, size, objectKey, contentMD5, uploadId }
        = params;
    // A part that fits in a single Azure block is stored as one subpart,
    // under subpart index 0.
    const blockId = azureMpuUtils.getBlockId(uploadId, partNumber, 0);
    const passThrough = new stream.PassThrough();
    const options = {};
    if (contentMD5) {
        // Have Azure verify the md5 of the data it receives.
        options.useTransactionalMD5 = true;
        options.transactionalContentMD5 = contentMD5;
    }
    request.pipe(passThrough);
    return errorWrapperFn('uploadPart', 'createBlockFromStream',
        [blockId, bucketName, objectKey, passThrough, size, options,
        (err, result) => {
            if (err) {
                log.error('Error from Azure data backend uploadPart',
                    { error: err.message, dataStoreName });
                // Translate well-known Azure error codes to S3 errors.
                const errorForCode = {
                    ContainerNotFound: errors.NoSuchBucket,
                    InvalidMd5: errors.InvalidDigest,
                    Md5Mismatch: errors.BadDigest,
                };
                if (errorForCode[err.code]) {
                    return cb(errorForCode[err.code]);
                }
                return cb(errors.InternalError.customizeDescription(
                    `Error returned from Azure: ${err.message}`)
                );
            }
            const eTag = objectUtils.getHexMD5(result.headers['content-md5']);
            // A single-block put always counts as one subpart.
            return cb(null, eTag, 1, size);
        }], log, cb);
};
|
||||||
|
|
||||||
|
azureMpuUtils.putNextSubPart = (errorWrapperFn, partParams, subPartInfo,
subPartStream, subPartIndex, resultsCollector, log, cb) => {
    const { uploadId, partNumber, bucketName, objectKey } = partParams;
    const size = azureMpuUtils.getSubPartSize(subPartInfo, subPartIndex);
    const blockId = azureMpuUtils.getBlockId(uploadId, partNumber,
        subPartIndex);
    // Record the in-flight operation before handing the stream to Azure,
    // so the collector knows a result is still pending.
    resultsCollector.pushOp();
    errorWrapperFn('uploadPart', 'createBlockFromStream',
        [blockId, bucketName, objectKey, subPartStream, size, {},
        err => resultsCollector.pushResult(err, subPartIndex)], log, cb);
};
|
||||||
|
|
||||||
|
/**
 * Upload a part larger than the maximum subpart size by splitting it into
 * subparts streamed to Azure, computing the md5 of the whole part on the
 * fly.
 * @param {function} errorWrapperFn - wrapper invoking the Azure SDK call
 * @param {stream.Readable} request - stream of data to upload
 * @param {object} params - upload parameters (bucketName, partNumber,
 * size, objectKey, uploadId, ...)
 * @param {string} dataStoreName - name of the data backend, for logging
 * @param {object} log - logger
 * @param {function} cb - callback: cb(err, eTag, numberSubParts, size)
 * @return {undefined}
 */
azureMpuUtils.putSubParts = (errorWrapperFn, request, params,
dataStoreName, log, cb) => {
    const subPartInfo = azureMpuUtils.getSubPartInfo(params.size);
    const resultsCollector = new ResultsCollector();
    const hashedStream = new MD5Sum();
    const streamInterface = new SubStreamInterface(hashedStream);
    // fix: the original message was missing the space after ';'
    log.trace('data length is greater than max subpart size; ' +
        'putting multiple parts');

    // A subpart failed while streaming was still in progress: stop
    // feeding data and fail the whole part upload.
    resultsCollector.on('error', (err, subPartIndex) => {
        streamInterface.stopStreaming(request);
        log.error(`Error putting subpart to Azure: ${subPartIndex}`,
            { error: err.message, dataStoreName });
        if (err.code === 'ContainerNotFound') {
            return cb(errors.NoSuchBucket);
        }
        return cb(errors.InternalError.customizeDescription(
            `Error returned from Azure: ${err}`));
    });

    // All subpart results are in and streaming has finished.
    resultsCollector.on('done', (err, results) => {
        if (err) {
            log.error('Error putting last subpart to Azure',
                { error: err.message, dataStoreName });
            if (err.code === 'ContainerNotFound') {
                return cb(errors.NoSuchBucket);
            }
            return cb(errors.InternalError.customizeDescription(
                `Error returned from Azure: ${err}`));
        }
        const numberSubParts = results.length;
        const totalLength = streamInterface.getTotalBytesStreamed();
        log.trace('successfully put subparts to Azure',
            { numberSubParts, totalLength });
        hashedStream.on('hashed', () => cb(null, hashedStream.completedHash,
            numberSubParts, totalLength));

        // in case the hashed event was already emitted before the
        // event handler was registered:
        if (hashedStream.completedHash) {
            hashedStream.removeAllListeners('hashed');
            return cb(null, hashedStream.completedHash, numberSubParts,
                totalLength);
        }
        return undefined;
    });

    const currentStream = streamInterface.getCurrentStream();
    // start first put to Azure before we start streaming the data
    azureMpuUtils.putNextSubPart(errorWrapperFn, params, subPartInfo,
        currentStream, 0, resultsCollector, log, cb);

    request.pipe(hashedStream);
    hashedStream.on('end', () => {
        resultsCollector.enableComplete();
        streamInterface.endStreaming();
    });
    // Split the hashed byte stream on subpart boundaries, starting a new
    // put to Azure every time a boundary is crossed.
    hashedStream.on('data', data => {
        const currentLength = streamInterface.getLengthCounter();
        if (currentLength + data.length > azureMpuUtils.maxSubPartSize) {
            const bytesToMaxSize = azureMpuUtils.maxSubPartSize - currentLength;
            const firstChunk = bytesToMaxSize === 0 ? data :
                data.slice(bytesToMaxSize);
            if (bytesToMaxSize !== 0) {
                // if we have not streamed full subpart, write enough of the
                // data chunk to stream the correct length
                streamInterface.write(data.slice(0, bytesToMaxSize));
            }
            const { nextStream, subPartIndex } =
                streamInterface.transitionToNextStream();
            azureMpuUtils.putNextSubPart(errorWrapperFn, params, subPartInfo,
                nextStream, subPartIndex, resultsCollector, log, cb);
            streamInterface.write(firstChunk);
        } else {
            streamInterface.write(data);
        }
    });
};
|
||||||
|
|
||||||
|
|
||||||
|
module.exports = azureMpuUtils;
|
|
@ -0,0 +1,107 @@
|
||||||
|
const querystring = require('querystring');
|
||||||
|
const escapeForXml = require('./escapeForXml');
|
||||||
|
|
||||||
|
const convertMethods = {};
|
||||||
|
|
||||||
|
convertMethods.completeMultipartUpload = xmlParams => {
    // Escape user-controlled values so they are safe to embed in XML.
    const bucket = escapeForXml(xmlParams.bucketName);
    const key = escapeForXml(xmlParams.objectKey);
    return [
        '<?xml version="1.0" encoding="UTF-8"?>',
        '<CompleteMultipartUploadResult ',
        'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">',
        `<Location>http://${bucket}.`,
        `${escapeForXml(xmlParams.hostname)}/`,
        `${key}</Location>`,
        `<Bucket>${bucket}</Bucket>`,
        `<Key>${key}</Key>`,
        `<ETag>${escapeForXml(xmlParams.eTag)}</ETag>`,
        '</CompleteMultipartUploadResult>',
    ].join('');
};
|
||||||
|
|
||||||
|
convertMethods.initiateMultipartUpload = xmlParams => [
    '<?xml version="1.0" encoding="UTF-8"?>',
    '<InitiateMultipartUploadResult ',
    'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">',
    `<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>`,
    `<Key>${escapeForXml(xmlParams.objectKey)}</Key>`,
    `<UploadId>${escapeForXml(xmlParams.uploadId)}</UploadId>`,
    '</InitiateMultipartUploadResult>',
].join('');
|
||||||
|
|
||||||
|
// Build the ListMultipartUploads XML response body from the listing
// result (`xmlParams.list`) and the request parameters.
convertMethods.listMultipartUploads = xmlParams => {
    const xml = [];
    const l = xmlParams.list;

    xml.push('<?xml version="1.0" encoding="UTF-8"?>',
        '<ListMultipartUploadsResult ' +
        'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">',
        `<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>`
    );

    // For certain XML elements, if it is `undefined`, AWS returns either an
    // empty tag or does not include it. Hence the `optional` key in the params.
    const params = [
        { tag: 'KeyMarker', value: xmlParams.keyMarker },
        { tag: 'UploadIdMarker', value: xmlParams.uploadIdMarker },
        { tag: 'NextKeyMarker', value: l.NextKeyMarker, optional: true },
        { tag: 'NextUploadIdMarker', value: l.NextUploadIdMarker,
            optional: true },
        { tag: 'Delimiter', value: l.Delimiter, optional: true },
        { tag: 'Prefix', value: xmlParams.prefix, optional: true },
    ];

    params.forEach(param => {
        if (param.value) {
            xml.push(`<${param.tag}>${escapeForXml(param.value)}` +
                `</${param.tag}>`);
        } else if (!param.optional) {
            // Non-optional tags are emitted as empty tags when the value
            // is missing.
            xml.push(`<${param.tag} />`);
        }
    });

    xml.push(`<MaxUploads>${escapeForXml(l.MaxKeys)}</MaxUploads>`,
        `<IsTruncated>${escapeForXml(l.IsTruncated)}</IsTruncated>`
    );

    // One <Upload> entry per in-progress multipart upload.
    l.Uploads.forEach(upload => {
        const val = upload.value;
        let key = upload.key;
        if (xmlParams.encoding === 'url') {
            // Honor encoding-type=url by url-encoding the object key.
            key = querystring.escape(key);
        }

        xml.push('<Upload>',
            `<Key>${escapeForXml(key)}</Key>`,
            `<UploadId>${escapeForXml(val.UploadId)}</UploadId>`,
            '<Initiator>',
            `<ID>${escapeForXml(val.Initiator.ID)}</ID>`,
            `<DisplayName>${escapeForXml(val.Initiator.DisplayName)}` +
            '</DisplayName>',
            '</Initiator>',
            '<Owner>',
            `<ID>${escapeForXml(val.Owner.ID)}</ID>`,
            `<DisplayName>${escapeForXml(val.Owner.DisplayName)}` +
            '</DisplayName>',
            '</Owner>',
            `<StorageClass>${escapeForXml(val.StorageClass)}` +
            '</StorageClass>',
            `<Initiated>${escapeForXml(val.Initiated)}</Initiated>`,
            '</Upload>'
        );
    });

    // One <CommonPrefixes> entry per aggregated prefix.
    l.CommonPrefixes.forEach(prefix => {
        xml.push('<CommonPrefixes>',
            `<Prefix>${escapeForXml(prefix)}</Prefix>`,
            '</CommonPrefixes>'
        );
    });

    xml.push('</ListMultipartUploadsResult>');

    return xml.join('');
};
|
||||||
|
|
||||||
|
/**
 * Build the XML response body for the given API method
 * @param {string} method - name of a conversion method defined in
 * `convertMethods` ('completeMultipartUpload', 'initiateMultipartUpload'
 * or 'listMultipartUploads')
 * @param {object} xmlParams - parameters expected by that method
 * @return {string} the XML document
 */
function convertToXml(method, xmlParams) {
    return convertMethods[method](xmlParams);
}
|
||||||
|
|
||||||
|
module.exports = convertToXml;
|
|
@ -0,0 +1,9 @@
|
||||||
|
const objectUtils = {
    // base64-encoded MD5 digest -> hex-encoded digest
    getHexMD5: base64MD5 =>
        Buffer.from(base64MD5, 'base64').toString('hex'),

    // hex-encoded MD5 digest -> base64-encoded digest
    getBase64MD5: hexMD5 =>
        Buffer.from(hexMD5, 'hex').toString('base64'),
};
|
||||||
|
|
||||||
|
module.exports = objectUtils;
|
|
@ -186,7 +186,7 @@ function parseTagFromQuery(tagQuery) {
|
||||||
for (let i = 0; i < pairs.length; i++) {
|
for (let i = 0; i < pairs.length; i++) {
|
||||||
const pair = pairs[i];
|
const pair = pairs[i];
|
||||||
if (!pair) {
|
if (!pair) {
|
||||||
emptyTag ++;
|
emptyTag++;
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
const pairArray = pair.split('=');
|
const pairArray = pair.split('=');
|
||||||
|
|
|
@ -45,7 +45,7 @@ function _checkModifiedSince(ifModifiedSinceTime, lastModified) {
|
||||||
if (ifModifiedSinceTime) {
|
if (ifModifiedSinceTime) {
|
||||||
res.present = true;
|
res.present = true;
|
||||||
const checkWith = (new Date(ifModifiedSinceTime)).getTime();
|
const checkWith = (new Date(ifModifiedSinceTime)).getTime();
|
||||||
if (isNaN(checkWith)) {
|
if (Number.isNaN(Number(checkWith))) {
|
||||||
res.error = errors.InvalidArgument;
|
res.error = errors.InvalidArgument;
|
||||||
} else if (lastModified <= checkWith) {
|
} else if (lastModified <= checkWith) {
|
||||||
res.error = errors.NotModified;
|
res.error = errors.NotModified;
|
||||||
|
@ -59,7 +59,7 @@ function _checkUnmodifiedSince(ifUnmodifiedSinceTime, lastModified) {
|
||||||
if (ifUnmodifiedSinceTime) {
|
if (ifUnmodifiedSinceTime) {
|
||||||
res.present = true;
|
res.present = true;
|
||||||
const checkWith = (new Date(ifUnmodifiedSinceTime)).getTime();
|
const checkWith = (new Date(ifUnmodifiedSinceTime)).getTime();
|
||||||
if (isNaN(checkWith)) {
|
if (Number.isNaN(Number(checkWith))) {
|
||||||
res.error = errors.InvalidArgument;
|
res.error = errors.InvalidArgument;
|
||||||
} else if (lastModified > checkWith) {
|
} else if (lastModified > checkWith) {
|
||||||
res.error = errors.PreconditionFailed;
|
res.error = errors.PreconditionFailed;
|
||||||
|
|
|
@ -35,12 +35,12 @@ function checkUnsupportedRoutes(reqMethod) {
|
||||||
|
|
||||||
function checkBucketAndKey(bucketName, objectKey, method, reqQuery,
|
function checkBucketAndKey(bucketName, objectKey, method, reqQuery,
|
||||||
blacklistedPrefixes, log) {
|
blacklistedPrefixes, log) {
|
||||||
// if empty name and request not a list Buckets
|
// bucketName should also be undefined, but is checked below anyway
|
||||||
if (!bucketName && !(method === 'GET' && !objectKey)) {
|
const getServiceCall = (method === 'GET' && !objectKey);
|
||||||
|
// if empty name and request not a list Buckets or preflight request
|
||||||
|
if (!bucketName && !(getServiceCall || method === 'OPTIONS')) {
|
||||||
log.debug('empty bucket name', { method: 'routes' });
|
log.debug('empty bucket name', { method: 'routes' });
|
||||||
return (method !== 'OPTIONS') ?
|
return errors.MethodNotAllowed;
|
||||||
errors.MethodNotAllowed : errors.AccessForbidden
|
|
||||||
.customizeDescription('CORSResponse: Bucket not found');
|
|
||||||
}
|
}
|
||||||
if (bucketName !== undefined && routesUtils.isValidBucketName(bucketName,
|
if (bucketName !== undefined && routesUtils.isValidBucketName(bucketName,
|
||||||
blacklistedPrefixes.bucket) === false) {
|
blacklistedPrefixes.bucket) === false) {
|
||||||
|
@ -186,7 +186,7 @@ function routes(req, res, params, logger) {
|
||||||
|
|
||||||
if (statsClient) {
|
if (statsClient) {
|
||||||
// report new request for stats
|
// report new request for stats
|
||||||
statsClient.reportNewRequest();
|
statsClient.reportNewRequest('s3');
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
|
|
@ -16,61 +16,59 @@ function routeDELETE(request, response, api, log, statsClient) {
|
||||||
return routesUtils.responseNoBody(err, corsHeaders, response,
|
return routesUtils.responseNoBody(err, corsHeaders, response,
|
||||||
204, log);
|
204, log);
|
||||||
});
|
});
|
||||||
} else {
|
} else if (request.objectKey === undefined) {
|
||||||
if (request.objectKey === undefined) {
|
if (request.query.website !== undefined) {
|
||||||
if (request.query.website !== undefined) {
|
return api.callApiMethod('bucketDeleteWebsite', request,
|
||||||
return api.callApiMethod('bucketDeleteWebsite', request,
|
response, log, (err, corsHeaders) => {
|
||||||
response, log, (err, corsHeaders) => {
|
|
||||||
routesUtils.statsReport500(err, statsClient);
|
|
||||||
return routesUtils.responseNoBody(err, corsHeaders,
|
|
||||||
response, 204, log);
|
|
||||||
});
|
|
||||||
} else if (request.query.cors !== undefined) {
|
|
||||||
return api.callApiMethod('bucketDeleteCors', request, response,
|
|
||||||
log, (err, corsHeaders) => {
|
|
||||||
routesUtils.statsReport500(err, statsClient);
|
|
||||||
return routesUtils.responseNoBody(err, corsHeaders,
|
|
||||||
response, 204, log);
|
|
||||||
});
|
|
||||||
} else if (request.query.replication !== undefined) {
|
|
||||||
return api.callApiMethod('bucketDeleteReplication', request,
|
|
||||||
response, log, (err, corsHeaders) => {
|
|
||||||
routesUtils.statsReport500(err, statsClient);
|
|
||||||
return routesUtils.responseNoBody(err, corsHeaders,
|
|
||||||
response, 204, log);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
api.callApiMethod('bucketDelete', request, response, log,
|
|
||||||
(err, corsHeaders) => {
|
|
||||||
routesUtils.statsReport500(err, statsClient);
|
routesUtils.statsReport500(err, statsClient);
|
||||||
return routesUtils.responseNoBody(err, corsHeaders, response,
|
return routesUtils.responseNoBody(err, corsHeaders,
|
||||||
204, log);
|
response, 204, log);
|
||||||
|
});
|
||||||
|
} else if (request.query.cors !== undefined) {
|
||||||
|
return api.callApiMethod('bucketDeleteCors', request, response,
|
||||||
|
log, (err, corsHeaders) => {
|
||||||
|
routesUtils.statsReport500(err, statsClient);
|
||||||
|
return routesUtils.responseNoBody(err, corsHeaders,
|
||||||
|
response, 204, log);
|
||||||
|
});
|
||||||
|
} else if (request.query.replication !== undefined) {
|
||||||
|
return api.callApiMethod('bucketDeleteReplication', request,
|
||||||
|
response, log, (err, corsHeaders) => {
|
||||||
|
routesUtils.statsReport500(err, statsClient);
|
||||||
|
return routesUtils.responseNoBody(err, corsHeaders,
|
||||||
|
response, 204, log);
|
||||||
});
|
});
|
||||||
} else {
|
|
||||||
if (request.query.tagging !== undefined) {
|
|
||||||
return api.callApiMethod('objectDeleteTagging', request,
|
|
||||||
response, log, (err, resHeaders) => {
|
|
||||||
routesUtils.statsReport500(err, statsClient);
|
|
||||||
return routesUtils.responseNoBody(err, resHeaders,
|
|
||||||
response, 204, log);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
api.callApiMethod('objectDelete', request, response, log,
|
|
||||||
(err, corsHeaders) => {
|
|
||||||
/*
|
|
||||||
* Since AWS expects a 204 regardless of the existence of
|
|
||||||
the object, the errors NoSuchKey and NoSuchVersion should not
|
|
||||||
* be sent back as a response.
|
|
||||||
*/
|
|
||||||
if (err && !err.NoSuchKey && !err.NoSuchVersion) {
|
|
||||||
return routesUtils.responseNoBody(err, corsHeaders,
|
|
||||||
response, null, log);
|
|
||||||
}
|
|
||||||
routesUtils.statsReport500(err, statsClient);
|
|
||||||
return routesUtils.responseNoBody(null, corsHeaders, response,
|
|
||||||
204, log);
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
api.callApiMethod('bucketDelete', request, response, log,
|
||||||
|
(err, corsHeaders) => {
|
||||||
|
routesUtils.statsReport500(err, statsClient);
|
||||||
|
return routesUtils.responseNoBody(err, corsHeaders, response,
|
||||||
|
204, log);
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
if (request.query.tagging !== undefined) {
|
||||||
|
return api.callApiMethod('objectDeleteTagging', request,
|
||||||
|
response, log, (err, resHeaders) => {
|
||||||
|
routesUtils.statsReport500(err, statsClient);
|
||||||
|
return routesUtils.responseNoBody(err, resHeaders,
|
||||||
|
response, 204, log);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
api.callApiMethod('objectDelete', request, response, log,
|
||||||
|
(err, corsHeaders) => {
|
||||||
|
/*
|
||||||
|
* Since AWS expects a 204 regardless of the existence of
|
||||||
|
the object, the errors NoSuchKey and NoSuchVersion should not
|
||||||
|
* be sent back as a response.
|
||||||
|
*/
|
||||||
|
if (err && !err.NoSuchKey && !err.NoSuchVersion) {
|
||||||
|
return routesUtils.responseNoBody(err, corsHeaders,
|
||||||
|
response, null, log);
|
||||||
|
}
|
||||||
|
routesUtils.statsReport500(err, statsClient);
|
||||||
|
return routesUtils.responseNoBody(null, corsHeaders, response,
|
||||||
|
204, log);
|
||||||
|
});
|
||||||
}
|
}
|
||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
|
|
|
@ -8,9 +8,11 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
|
||||||
} else if (request.bucketName === undefined
|
} else if (request.bucketName === undefined
|
||||||
&& request.objectKey === undefined) {
|
&& request.objectKey === undefined) {
|
||||||
// GET service
|
// GET service
|
||||||
api.callApiMethod('serviceGet', request, response, log, (err, xml) => {
|
api.callApiMethod('serviceGet', request, response, log,
|
||||||
|
(err, xml, corsHeaders) => {
|
||||||
routesUtils.statsReport500(err, statsClient);
|
routesUtils.statsReport500(err, statsClient);
|
||||||
return routesUtils.responseXMLBody(err, xml, response, log);
|
return routesUtils.responseXMLBody(err, xml, response, log,
|
||||||
|
corsHeaders);
|
||||||
});
|
});
|
||||||
} else if (request.objectKey === undefined) {
|
} else if (request.objectKey === undefined) {
|
||||||
// GET bucket ACL
|
// GET bucket ACL
|
||||||
|
@ -74,6 +76,7 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
|
/* eslint-disable no-lonely-if */
|
||||||
if (request.query.acl !== undefined) {
|
if (request.query.acl !== undefined) {
|
||||||
// GET object ACL
|
// GET object ACL
|
||||||
api.callApiMethod('objectGetACL', request, response, log,
|
api.callApiMethod('objectGetACL', request, response, log,
|
||||||
|
@ -113,6 +116,7 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
|
||||||
range, log);
|
range, log);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
/* eslint-enable */
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -10,8 +10,8 @@ function routePUT(request, response, api, log, statsClient) {
|
||||||
|
|
||||||
// content-length for object is handled separately below
|
// content-length for object is handled separately below
|
||||||
const contentLength = request.headers['content-length'];
|
const contentLength = request.headers['content-length'];
|
||||||
if ((contentLength && (isNaN(contentLength) || contentLength < 0)) ||
|
if ((contentLength && (Number.isNaN(Number(contentLength))
|
||||||
contentLength === '') {
|
|| contentLength < 0)) || contentLength === '') {
|
||||||
log.debug('invalid content-length header');
|
log.debug('invalid content-length header');
|
||||||
return routesUtils.responseNoBody(
|
return routesUtils.responseNoBody(
|
||||||
errors.BadRequest, null, response, null, log);
|
errors.BadRequest, null, response, null, log);
|
||||||
|
|
|
@ -19,7 +19,7 @@ function setCommonResponseHeaders(headers, response, log) {
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
log.debug('header can not be added ' +
|
log.debug('header can not be added ' +
|
||||||
'to the response', { header: headers[key],
|
'to the response', { header: headers[key],
|
||||||
error: e.stack, method: 'setCommonResponseHeaders' });
|
error: e.stack, method: 'setCommonResponseHeaders' });
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
@ -108,8 +108,8 @@ const XMLResponseBackend = {
|
||||||
});
|
});
|
||||||
setCommonResponseHeaders(corsHeaders, response, log);
|
setCommonResponseHeaders(corsHeaders, response, log);
|
||||||
response.writeHead(errCode.code,
|
response.writeHead(errCode.code,
|
||||||
{ 'Content-Type': 'application/xml',
|
{ 'Content-Type': 'application/xml',
|
||||||
'Content-Length': bytesSent });
|
'Content-Length': bytesSent });
|
||||||
return response.end(xmlStr, 'utf8', () => {
|
return response.end(xmlStr, 'utf8', () => {
|
||||||
log.end().info('responded with error XML', {
|
log.end().info('responded with error XML', {
|
||||||
httpCode: response.statusCode,
|
httpCode: response.statusCode,
|
||||||
|
@ -169,8 +169,8 @@ const JSONResponseBackend = {
|
||||||
});
|
});
|
||||||
setCommonResponseHeaders(corsHeaders, response, log);
|
setCommonResponseHeaders(corsHeaders, response, log);
|
||||||
response.writeHead(errCode.code,
|
response.writeHead(errCode.code,
|
||||||
{ 'Content-Type': 'application/json',
|
{ 'Content-Type': 'application/json',
|
||||||
'Content-Length': bytesSent });
|
'Content-Length': bytesSent });
|
||||||
return response.end(jsonStr, 'utf8', () => {
|
return response.end(jsonStr, 'utf8', () => {
|
||||||
log.end().info('responded with error JSON', {
|
log.end().info('responded with error JSON', {
|
||||||
httpCode: response.statusCode,
|
httpCode: response.statusCode,
|
||||||
|
@ -310,7 +310,8 @@ function _contentLengthMatchesLocations(contentLength, dataLocations) {
|
||||||
(sum, location) => (sum !== undefined && location.size ?
|
(sum, location) => (sum !== undefined && location.size ?
|
||||||
sum + Number.parseInt(location.size, 10) :
|
sum + Number.parseInt(location.size, 10) :
|
||||||
undefined), 0);
|
undefined), 0);
|
||||||
return sumSizes === undefined || sumSizes === contentLength;
|
return sumSizes === undefined ||
|
||||||
|
sumSizes === Number.parseInt(contentLength, 10);
|
||||||
}
|
}
|
||||||
|
|
||||||
const routesUtils = {
|
const routesUtils = {
|
||||||
|
@ -830,6 +831,7 @@ const routesUtils = {
|
||||||
*/
|
*/
|
||||||
isValidBucketName(bucketname, prefixBlacklist) {
|
isValidBucketName(bucketname, prefixBlacklist) {
|
||||||
const ipAddressRegex = new RegExp(/^(\d+\.){3}\d+$/);
|
const ipAddressRegex = new RegExp(/^(\d+\.){3}\d+$/);
|
||||||
|
// eslint-disable-next-line no-useless-escape
|
||||||
const dnsRegex = new RegExp(/^[a-z0-9]+([\.\-]{1}[a-z0-9]+)*$/);
|
const dnsRegex = new RegExp(/^[a-z0-9]+([\.\-]{1}[a-z0-9]+)*$/);
|
||||||
// Must be at least 3 and no more than 63 characters long.
|
// Must be at least 3 and no more than 63 characters long.
|
||||||
if (bucketname.length < 3 || bucketname.length > 63) {
|
if (bucketname.length < 3 || bucketname.length > 63) {
|
||||||
|
@ -878,7 +880,7 @@ const routesUtils = {
|
||||||
*/
|
*/
|
||||||
statsReport500(err, statsClient) {
|
statsReport500(err, statsClient) {
|
||||||
if (statsClient && err && err.code === 500) {
|
if (statsClient && err && err.code === 500) {
|
||||||
statsClient.report500();
|
statsClient.report500('s3');
|
||||||
}
|
}
|
||||||
return undefined;
|
return undefined;
|
||||||
},
|
},
|
||||||
|
|
|
@ -139,8 +139,8 @@ class DataFileStore {
|
||||||
// disable autoClose so that we can close(fd) only after
|
// disable autoClose so that we can close(fd) only after
|
||||||
// fsync() has been called
|
// fsync() has been called
|
||||||
const fileStream = fs.createWriteStream(filePath,
|
const fileStream = fs.createWriteStream(filePath,
|
||||||
{ fd,
|
{ fd,
|
||||||
autoClose: false });
|
autoClose: false });
|
||||||
|
|
||||||
fileStream.on('finish', () => {
|
fileStream.on('finish', () => {
|
||||||
function ok() {
|
function ok() {
|
||||||
|
@ -156,8 +156,8 @@ class DataFileStore {
|
||||||
fs.close(fd);
|
fs.close(fd);
|
||||||
if (err) {
|
if (err) {
|
||||||
log.error('fsync error',
|
log.error('fsync error',
|
||||||
{ method: 'put', key, filePath,
|
{ method: 'put', key, filePath,
|
||||||
error: err });
|
error: err });
|
||||||
return cbOnce(
|
return cbOnce(
|
||||||
errors.InternalError.customizeDescription(
|
errors.InternalError.customizeDescription(
|
||||||
'filesystem error: fsync() returned ' +
|
'filesystem error: fsync() returned ' +
|
||||||
|
@ -247,8 +247,8 @@ class DataFileStore {
|
||||||
return cbOnce(errors.ObjNotFound);
|
return cbOnce(errors.ObjNotFound);
|
||||||
}
|
}
|
||||||
log.error('error retrieving file',
|
log.error('error retrieving file',
|
||||||
{ method: 'get', key, filePath,
|
{ method: 'get', key, filePath,
|
||||||
error: err });
|
error: err });
|
||||||
return cbOnce(
|
return cbOnce(
|
||||||
errors.InternalError.customizeDescription(
|
errors.InternalError.customizeDescription(
|
||||||
`filesystem read error: ${err.code}`));
|
`filesystem read error: ${err.code}`));
|
||||||
|
@ -273,8 +273,8 @@ class DataFileStore {
|
||||||
return callback(errors.ObjNotFound);
|
return callback(errors.ObjNotFound);
|
||||||
}
|
}
|
||||||
log.error('error deleting file', { method: 'delete',
|
log.error('error deleting file', { method: 'delete',
|
||||||
key, filePath,
|
key, filePath,
|
||||||
error: err });
|
error: err });
|
||||||
return callback(errors.InternalError.customizeDescription(
|
return callback(errors.InternalError.customizeDescription(
|
||||||
`filesystem error: unlink() returned ${err.code}`));
|
`filesystem error: unlink() returned ${err.code}`));
|
||||||
}
|
}
|
||||||
|
|
|
@ -96,14 +96,14 @@ class LogConsumer {
|
||||||
this.logger.warn('raft session does not exist yet',
|
this.logger.warn('raft session does not exist yet',
|
||||||
{ raftId: this.raftSession });
|
{ raftId: this.raftSession });
|
||||||
return cb(null, { info: { start: null,
|
return cb(null, { info: { start: null,
|
||||||
end: null } });
|
end: null } });
|
||||||
}
|
}
|
||||||
if (err.code === 416) {
|
if (err.code === 416) {
|
||||||
// requested range not satisfiable
|
// requested range not satisfiable
|
||||||
this.logger.debug('no new log record to process',
|
this.logger.debug('no new log record to process',
|
||||||
{ raftId: this.raftSession });
|
{ raftId: this.raftSession });
|
||||||
return cb(null, { info: { start: null,
|
return cb(null, { info: { start: null,
|
||||||
end: null } });
|
end: null } });
|
||||||
}
|
}
|
||||||
this.logger.error(
|
this.logger.error(
|
||||||
'Error handling record log request', { error: err });
|
'Error handling record log request', { error: err });
|
||||||
|
@ -120,7 +120,7 @@ class LogConsumer {
|
||||||
logResponse.log.forEach(entry => recordStream.write(entry));
|
logResponse.log.forEach(entry => recordStream.write(entry));
|
||||||
recordStream.end();
|
recordStream.end();
|
||||||
return cb(null, { info: logResponse.info,
|
return cb(null, { info: logResponse.info,
|
||||||
log: recordStream });
|
log: recordStream });
|
||||||
}, this.logger.newRequestLogger());
|
}, this.logger.newRequestLogger());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -91,7 +91,7 @@ class MetadataFileClient {
|
||||||
return done(err);
|
return done(err);
|
||||||
}
|
}
|
||||||
this.logger.info('connected to record log service', { url });
|
this.logger.info('connected to record log service', { url });
|
||||||
return done();
|
return done(null, logProxy);
|
||||||
});
|
});
|
||||||
return logProxy;
|
return logProxy;
|
||||||
}
|
}
|
||||||
|
|
|
@ -134,8 +134,8 @@ class MetadataFileServer {
|
||||||
db to clients */
|
db to clients */
|
||||||
this.server = new rpc.RPCServer(
|
this.server = new rpc.RPCServer(
|
||||||
{ logger: this.logger,
|
{ logger: this.logger,
|
||||||
streamMaxPendingAck: this.streamMaxPendingAck,
|
streamMaxPendingAck: this.streamMaxPendingAck,
|
||||||
streamAckTimeoutMs: this.streamAckTimeoutMs });
|
streamAckTimeoutMs: this.streamAckTimeoutMs });
|
||||||
this.server.listen(this.port, this.bindAddress);
|
this.server.listen(this.port, this.bindAddress);
|
||||||
this.servers.push(this.server);
|
this.servers.push(this.server);
|
||||||
|
|
||||||
|
@ -147,7 +147,7 @@ class MetadataFileServer {
|
||||||
}
|
}
|
||||||
|
|
||||||
this.servers.forEach(server => {
|
this.servers.forEach(server => {
|
||||||
server.registerServices.apply(server, this.services);
|
server.registerServices(...this.services);
|
||||||
});
|
});
|
||||||
|
|
||||||
this.genUUIDIfNotExists();
|
this.genUUIDIfNotExists();
|
||||||
|
|
|
@ -54,7 +54,7 @@ module.exports.setDirSyncFlag = function setDirSyncFlag(path, logger) {
|
||||||
if (doLog) {
|
if (doLog) {
|
||||||
if (error) {
|
if (error) {
|
||||||
logger.warn(warning, { error: error.message,
|
logger.warn(warning, { error: error.message,
|
||||||
errorStack: error.stack });
|
errorStack: error.stack });
|
||||||
} else {
|
} else {
|
||||||
logger.warn(warning);
|
logger.warn(warning);
|
||||||
}
|
}
|
||||||
|
|
|
@ -179,6 +179,7 @@ class TestMatrix {
|
||||||
this.listOfSpecialCase.forEach(specialCase => {
|
this.listOfSpecialCase.forEach(specialCase => {
|
||||||
const keyCase = specialCase.key;
|
const keyCase = specialCase.key;
|
||||||
const result = Object.keys(keyCase).every(currentKey => {
|
const result = Object.keys(keyCase).every(currentKey => {
|
||||||
|
// eslint-disable-next-line no-prototype-builtins
|
||||||
if (this.params.hasOwnProperty(currentKey) === false) {
|
if (this.params.hasOwnProperty(currentKey) === false) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
@ -192,7 +193,7 @@ class TestMatrix {
|
||||||
*/
|
*/
|
||||||
if (result === true) {
|
if (result === true) {
|
||||||
callFunction(this, matrixGenerated,
|
callFunction(this, matrixGenerated,
|
||||||
specialCase.callback, specialCase.description);
|
specialCase.callback, specialCase.description);
|
||||||
aSpecialCaseWasFound = true;
|
aSpecialCaseWasFound = true;
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
|
@ -185,7 +185,7 @@ class Version {
|
||||||
}
|
}
|
||||||
|
|
||||||
function isMasterKey(key) {
|
function isMasterKey(key) {
|
||||||
return ! key.includes(VID_SEP);
|
return !key.includes(VID_SEP);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -80,7 +80,7 @@ class VersioningRequestProcessor {
|
||||||
return callback(err);
|
return callback(err);
|
||||||
}
|
}
|
||||||
// answer if value is not a place holder for deletion
|
// answer if value is not a place holder for deletion
|
||||||
if (! Version.isPHD(data)) {
|
if (!Version.isPHD(data)) {
|
||||||
return callback(null, data);
|
return callback(null, data);
|
||||||
}
|
}
|
||||||
logger.debug('master version is a PHD, getting the latest version',
|
logger.debug('master version is a PHD, getting the latest version',
|
||||||
|
@ -133,16 +133,16 @@ class VersioningRequestProcessor {
|
||||||
logger.info('no other versions', { request });
|
logger.info('no other versions', { request });
|
||||||
this.dequeueGet(request, errors.ObjNotFound);
|
this.dequeueGet(request, errors.ObjNotFound);
|
||||||
return this.repairMaster(request, logger,
|
return this.repairMaster(request, logger,
|
||||||
{ type: 'del',
|
{ type: 'del',
|
||||||
value: list[0].value });
|
value: list[0].value });
|
||||||
}
|
}
|
||||||
// need repair
|
// need repair
|
||||||
logger.info('update master by the latest version', { request });
|
logger.info('update master by the latest version', { request });
|
||||||
const nextValue = list[1].value;
|
const nextValue = list[1].value;
|
||||||
this.dequeueGet(request, null, nextValue);
|
this.dequeueGet(request, null, nextValue);
|
||||||
return this.repairMaster(request, logger,
|
return this.repairMaster(request, logger,
|
||||||
{ type: 'put', value: list[0].value,
|
{ type: 'put', value: list[0].value,
|
||||||
nextValue });
|
nextValue });
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -352,7 +352,7 @@ class VersioningRequestProcessor {
|
||||||
// no versioning or versioning configuration off
|
// no versioning or versioning configuration off
|
||||||
if (!(options && options.versionId)) {
|
if (!(options && options.versionId)) {
|
||||||
return this.writeCache.batch({ db,
|
return this.writeCache.batch({ db,
|
||||||
array: [{ key, type: 'del' }] },
|
array: [{ key, type: 'del' }] },
|
||||||
logger, callback);
|
logger, callback);
|
||||||
}
|
}
|
||||||
// version specific DELETE
|
// version specific DELETE
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": "6.9.5"
|
"node": "6.9.5"
|
||||||
},
|
},
|
||||||
"version": "7.0.1",
|
"version": "7.2.0",
|
||||||
"description": "Common utilities for the S3 project components",
|
"description": "Common utilities for the S3 project components",
|
||||||
"main": "index.js",
|
"main": "index.js",
|
||||||
"repository": {
|
"repository": {
|
||||||
|
@ -21,9 +21,12 @@
|
||||||
"async": "~2.1.5",
|
"async": "~2.1.5",
|
||||||
"debug": "~2.3.3",
|
"debug": "~2.3.3",
|
||||||
"diskusage": "^0.2.2",
|
"diskusage": "^0.2.2",
|
||||||
|
"ioredis": "2.4.0",
|
||||||
"ipaddr.js": "1.2.0",
|
"ipaddr.js": "1.2.0",
|
||||||
|
"joi": "^10.6",
|
||||||
"level": "~1.6.0",
|
"level": "~1.6.0",
|
||||||
"level-sublevel": "~6.6.1",
|
"level-sublevel": "~6.6.1",
|
||||||
|
"simple-glob": "^0.1",
|
||||||
"socket.io": "~1.7.3",
|
"socket.io": "~1.7.3",
|
||||||
"socket.io-client": "~1.7.3",
|
"socket.io-client": "~1.7.3",
|
||||||
"utf8": "2.1.2",
|
"utf8": "2.1.2",
|
||||||
|
|
|
@ -72,4 +72,20 @@ describe('AuthInfo class constructor', () => {
|
||||||
const publicUser = new AuthInfo({ canonicalID: constants.publicId });
|
const publicUser = new AuthInfo({ canonicalID: constants.publicId });
|
||||||
assert.strictEqual(publicUser.isRequesterPublicUser(), true);
|
assert.strictEqual(publicUser.isRequesterPublicUser(), true);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should have a working isRequesterAServiceAccount() method', () => {
|
||||||
|
assert.strictEqual(authInfo.isRequesterAServiceAccount(), false);
|
||||||
|
const serviceAccount = new AuthInfo({
|
||||||
|
canonicalID: `${constants.zenkoServiceAccount}/clueso` });
|
||||||
|
assert.strictEqual(serviceAccount.isRequesterAServiceAccount(), true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should have a working isRequesterThisServiceAccount() method', () => {
|
||||||
|
const serviceAccount = new AuthInfo({
|
||||||
|
canonicalID: `${constants.zenkoServiceAccount}/clueso` });
|
||||||
|
assert.strictEqual(
|
||||||
|
serviceAccount.isRequesterThisServiceAccount('backbeat'), false);
|
||||||
|
assert.strictEqual(
|
||||||
|
serviceAccount.isRequesterThisServiceAccount('clueso'), true);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
|
@ -1,8 +1,7 @@
|
||||||
const assert = require('assert');
|
const assert = require('assert');
|
||||||
const werelogs = require('werelogs');
|
const werelogs = require('werelogs');
|
||||||
|
|
||||||
const validateAuthConfig
|
const AuthLoader = require('../../../../lib/auth/auth').inMemory.AuthLoader;
|
||||||
= require('../../../../lib/auth/auth').inMemory.validateAuthConfig;
|
|
||||||
const ref = require('./sample_authdata.json');
|
const ref = require('./sample_authdata.json');
|
||||||
|
|
||||||
werelogs.configure({
|
werelogs.configure({
|
||||||
|
@ -16,7 +15,7 @@ function getParentField(obj, field) {
|
||||||
for (let i = 0; i < fields.length - 1; ++i) {
|
for (let i = 0; i < fields.length - 1; ++i) {
|
||||||
const cur = fields[i];
|
const cur = fields[i];
|
||||||
const n = Number(cur, 10);
|
const n = Number(cur, 10);
|
||||||
if (isNaN(n)) {
|
if (Number.isNaN(n)) {
|
||||||
parent = parent[cur];
|
parent = parent[cur];
|
||||||
} else {
|
} else {
|
||||||
parent = parent[n];
|
parent = parent[n];
|
||||||
|
@ -29,15 +28,19 @@ function getFieldName(field) {
|
||||||
return field.split('.').pop();
|
return field.split('.').pop();
|
||||||
}
|
}
|
||||||
|
|
||||||
function shouldFail(obj, checkSas, done) {
|
function shouldFail(obj, done) {
|
||||||
const res = validateAuthConfig(obj, werelogs, checkSas);
|
const authLoader = new AuthLoader(werelogs);
|
||||||
assert.strictEqual(res, true);
|
authLoader.addAccounts(obj);
|
||||||
|
const res = authLoader.validate();
|
||||||
|
assert.strictEqual(res, false);
|
||||||
done();
|
done();
|
||||||
}
|
}
|
||||||
|
|
||||||
function shouldSuccess(obj, checkSas, done) {
|
function shouldSucceed(obj, done) {
|
||||||
const res = validateAuthConfig(obj, werelogs, checkSas);
|
const authLoader = new AuthLoader(werelogs);
|
||||||
assert.strictEqual(res, false);
|
authLoader.addAccounts(obj);
|
||||||
|
const res = authLoader.validate();
|
||||||
|
assert.strictEqual(res, true);
|
||||||
done();
|
done();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -45,15 +48,15 @@ const should = {
|
||||||
_exec: undefined,
|
_exec: undefined,
|
||||||
missingField: (obj, field, done) => {
|
missingField: (obj, field, done) => {
|
||||||
delete getParentField(obj, field)[getFieldName(field)];
|
delete getParentField(obj, field)[getFieldName(field)];
|
||||||
should._exec(obj, true, done);
|
should._exec(obj, done);
|
||||||
},
|
},
|
||||||
modifiedField: (obj, field, value, done) => {
|
modifiedField: (obj, field, value, done) => {
|
||||||
getParentField(obj, field)[getFieldName(field)] = value;
|
getParentField(obj, field)[getFieldName(field)] = value;
|
||||||
should._exec(obj, true, done);
|
should._exec(obj, done);
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
describe('S3 AuthData Checker', () => {
|
describe('AuthLoader class', () => {
|
||||||
let obj = {};
|
let obj = {};
|
||||||
|
|
||||||
beforeEach(done => {
|
beforeEach(done => {
|
||||||
|
@ -71,18 +74,10 @@ describe('S3 AuthData Checker', () => {
|
||||||
['accounts.0.email', 64],
|
['accounts.0.email', 64],
|
||||||
['accounts.0.arn', undefined],
|
['accounts.0.arn', undefined],
|
||||||
['accounts.0.arn', 64],
|
['accounts.0.arn', 64],
|
||||||
['accounts.0.sasToken', undefined],
|
|
||||||
['accounts.0.sasToken', 64],
|
|
||||||
['accounts.0.canonicalID', undefined],
|
['accounts.0.canonicalID', undefined],
|
||||||
['accounts.0.canonicalID', 64],
|
['accounts.0.canonicalID', 64],
|
||||||
['accounts.0.users', 'not an object'],
|
|
||||||
['accounts.0.users.0.arn', undefined],
|
|
||||||
['accounts.0.users.0.arn', 64],
|
|
||||||
['accounts.0.users.0.email', undefined],
|
|
||||||
['accounts.0.users.0.email', 64],
|
|
||||||
['accounts.0.users.0.keys', undefined],
|
|
||||||
['accounts.0.users.0.keys', 'not an Array'],
|
|
||||||
['accounts.0.keys', 'not an Array'],
|
['accounts.0.keys', 'not an Array'],
|
||||||
|
['accounts.0.keys', undefined],
|
||||||
].forEach(test => {
|
].forEach(test => {
|
||||||
if (test[1] === undefined) {
|
if (test[1] === undefined) {
|
||||||
// Check a failure when deleting required fields
|
// Check a failure when deleting required fields
|
||||||
|
@ -93,7 +88,8 @@ describe('S3 AuthData Checker', () => {
|
||||||
} else {
|
} else {
|
||||||
// Check a failure when the type of field is different than
|
// Check a failure when the type of field is different than
|
||||||
// expected
|
// expected
|
||||||
it(`should fail when modified field ${test[0]}${test[1]}`, done => {
|
it(`should fail when modified field ${test[0]} ${test[1]}`,
|
||||||
|
done => {
|
||||||
should._exec = shouldFail;
|
should._exec = shouldFail;
|
||||||
should.modifiedField(obj, test[0], test[1], done);
|
should.modifiedField(obj, test[0], test[1], done);
|
||||||
});
|
});
|
||||||
|
@ -109,52 +105,30 @@ describe('S3 AuthData Checker', () => {
|
||||||
'accounts.0.users',
|
'accounts.0.users',
|
||||||
].forEach(test => {
|
].forEach(test => {
|
||||||
// Check a success when deleting optional fields
|
// Check a success when deleting optional fields
|
||||||
it(`should success when missing field ${test[0]}`, done => {
|
it(`should return success when missing field ${test}`, done => {
|
||||||
should._exec = shouldSuccess;
|
should._exec = shouldSucceed;
|
||||||
should.missingField(obj, test[0], done);
|
should.missingField(obj, test[0], done);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('Should return success if no sasToken and checkSas false', done => {
|
|
||||||
obj.accounts[0].sasToken = undefined;
|
|
||||||
shouldSuccess(obj, false, done);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('Should return error on two same sasTokens and checkSas true', done => {
|
|
||||||
obj.accounts[0].sasToken = obj.accounts[1].sasToken;
|
|
||||||
shouldFail(obj, true, done);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('Should return success on two same sasTokens and checkSas false',
|
|
||||||
done => {
|
|
||||||
obj.accounts[0].sasToken = obj.accounts[1].sasToken;
|
|
||||||
shouldSuccess(obj, false, done);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('Should return error on two same canonicalID', done => {
|
it('Should return error on two same canonicalID', done => {
|
||||||
obj.accounts[0].canonicalID = obj.accounts[1].canonicalID;
|
obj.accounts[0].canonicalID = obj.accounts[1].canonicalID;
|
||||||
shouldFail(obj, null, done);
|
shouldFail(obj, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('Should return error on two same emails, account-account', done => {
|
it('Should return error on two same emails', done => {
|
||||||
obj.accounts[0].email = obj.accounts[1].email;
|
obj.accounts[0].email = obj.accounts[1].email;
|
||||||
shouldFail(obj, null, done);
|
shouldFail(obj, done);
|
||||||
});
|
|
||||||
|
|
||||||
it('Should return error on two same emails account-user', done => {
|
|
||||||
obj.accounts[0].users[0].email = obj.accounts[1].email;
|
|
||||||
shouldFail(obj, null, done);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('Should return error on two same arn', done => {
|
it('Should return error on two same arn', done => {
|
||||||
obj.accounts[0].arn = obj.accounts[0].users[0].arn;
|
obj.accounts[0].arn = obj.accounts[1].arn;
|
||||||
shouldFail(obj, null, done);
|
shouldFail(obj, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('Should return error on two same access key', done => {
|
it('Should return error on two same access key', done => {
|
||||||
obj.accounts[0].keys[0].access =
|
obj.accounts[0].keys[0].access = obj.accounts[1].keys[0].access;
|
||||||
obj.accounts[0].users[0].keys[0].access;
|
shouldFail(obj, done);
|
||||||
shouldFail(obj, null, done);
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
|
@ -1,47 +0,0 @@
|
||||||
const assert = require('assert');
|
|
||||||
const Backend = require('../../../../lib/auth/auth').inMemory.backend.s3;
|
|
||||||
const DummyRequestLogger = require('../../helpers').DummyRequestLogger;
|
|
||||||
const authData = require('./sample_authdata');
|
|
||||||
|
|
||||||
const backend = new Backend(JSON.parse(JSON.stringify(authData)));
|
|
||||||
const counter = 10;
|
|
||||||
// eslint-disable-next-line arrow-body-style
|
|
||||||
const specificResource = [...Array(counter).keys()].map(i => {
|
|
||||||
return {
|
|
||||||
key: `key${i}`,
|
|
||||||
};
|
|
||||||
});
|
|
||||||
const generalResource = 'bucketName';
|
|
||||||
|
|
||||||
const requestContexts = {
|
|
||||||
constantParams: {
|
|
||||||
generalResource,
|
|
||||||
},
|
|
||||||
parameterize: {
|
|
||||||
specificResource,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
const service = 's3';
|
|
||||||
const userArn = 'aws::iam:123456789012:root';
|
|
||||||
const log = new DummyRequestLogger();
|
|
||||||
// eslint-disable-next-line arrow-body-style
|
|
||||||
const expectedResults = specificResource.map(entry => {
|
|
||||||
return {
|
|
||||||
isAllowed: true,
|
|
||||||
arn: `arn:aws:${service}:::${generalResource}/${entry.key}`,
|
|
||||||
versionId: undefined,
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('S3AuthBackend.checkPolicies', () => {
|
|
||||||
it(' should mock successful results', done => {
|
|
||||||
backend.checkPolicies(requestContexts, userArn, log,
|
|
||||||
(err, vaultReturnObject) => {
|
|
||||||
assert.strictEqual(err, null, `Unexpected err: ${err}`);
|
|
||||||
assert.deepStrictEqual(vaultReturnObject, {
|
|
||||||
message: { body: expectedResults },
|
|
||||||
});
|
|
||||||
return done();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
|
@ -2,7 +2,7 @@ const assert = require('assert');
|
||||||
|
|
||||||
const Indexer = require('../../../../lib/auth/in_memory/Indexer');
|
const Indexer = require('../../../../lib/auth/in_memory/Indexer');
|
||||||
const ref = require('./sample_authdata.json');
|
const ref = require('./sample_authdata.json');
|
||||||
const { should } = require('./validateAuthConfig');
|
const { should } = require('./AuthLoader.spec');
|
||||||
|
|
||||||
describe('S3 AuthData Indexer', () => {
|
describe('S3 AuthData Indexer', () => {
|
||||||
let obj = {};
|
let obj = {};
|
||||||
|
@ -28,15 +28,6 @@ describe('S3 AuthData Indexer', () => {
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('Should return user from email', done => {
|
|
||||||
const res = index.getEntityByEmail(obj.accounts[0].users[0].email);
|
|
||||||
assert.strictEqual(typeof res, 'object');
|
|
||||||
assert.strictEqual(res.arn, obj.accounts[0].arn);
|
|
||||||
assert.strictEqual(res.IAMdisplayName,
|
|
||||||
obj.accounts[0].users[0].name);
|
|
||||||
done();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('Should return account from key', done => {
|
it('Should return account from key', done => {
|
||||||
const res = index.getEntityByKey(obj.accounts[0].keys[0].access);
|
const res = index.getEntityByKey(obj.accounts[0].keys[0].access);
|
||||||
assert.strictEqual(typeof res, 'object');
|
assert.strictEqual(typeof res, 'object');
|
||||||
|
@ -44,16 +35,6 @@ describe('S3 AuthData Indexer', () => {
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('Should return user from key', done => {
|
|
||||||
const res = index.getEntityByKey(obj.accounts[0].users[0].keys[0]
|
|
||||||
.access);
|
|
||||||
assert.strictEqual(typeof res, 'object');
|
|
||||||
assert.strictEqual(res.arn, obj.accounts[0].arn);
|
|
||||||
assert.strictEqual(res.IAMdisplayName,
|
|
||||||
obj.accounts[0].users[0].name);
|
|
||||||
done();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should index account without keys', done => {
|
it('should index account without keys', done => {
|
||||||
should._exec = () => {
|
should._exec = () => {
|
||||||
index = new Indexer(obj);
|
index = new Indexer(obj);
|
||||||
|
|
|
@ -2,44 +2,22 @@
|
||||||
"accounts": [{
|
"accounts": [{
|
||||||
"name": "Bart",
|
"name": "Bart",
|
||||||
"email": "sampleaccount1@sampling.com",
|
"email": "sampleaccount1@sampling.com",
|
||||||
"arn": "aws::iam:123456789012:root",
|
"arn": "arn:aws:iam::123456789012:root",
|
||||||
"canonicalID": "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be",
|
"canonicalID": "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be",
|
||||||
"shortid": "123456789012",
|
"shortid": "123456789012",
|
||||||
"keys": [{
|
"keys": [{
|
||||||
"access": "accessKey1",
|
"access": "accessKey1",
|
||||||
"secret": "verySecretKey1"
|
"secret": "verySecretKey1"
|
||||||
}],
|
}]
|
||||||
"users": [{
|
|
||||||
"name": "Bart Jr",
|
|
||||||
"email": "user1.sampleaccount2@sampling.com",
|
|
||||||
"arn": "aws::iam:123456789013:bart",
|
|
||||||
"keys": [{
|
|
||||||
"access": "USERBARTFUNACCESSKEY",
|
|
||||||
"secret": "verySecretKey1"
|
|
||||||
}]
|
|
||||||
}],
|
|
||||||
"sasToken": "test0"
|
|
||||||
}, {
|
}, {
|
||||||
"name": "Lisa",
|
"name": "Lisa",
|
||||||
"email": "sampleaccount2@sampling.com",
|
"email": "sampleaccount2@sampling.com",
|
||||||
"arn": "aws::iam:accessKey2:user/Lisa",
|
"arn": "arn:aws:iam::123456789013:root",
|
||||||
"canonicalID": "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2bf",
|
"canonicalID": "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2bf",
|
||||||
"shortid": "123456789012",
|
"shortid": "123456789013",
|
||||||
"keys": [{
|
"keys": [{
|
||||||
"access": "accessKey2",
|
"access": "accessKey2",
|
||||||
"secret": "verySecretKey2"
|
"secret": "verySecretKey2"
|
||||||
}],
|
}]
|
||||||
"sasToken": "test1"
|
|
||||||
}, {
|
|
||||||
"name": "Docker",
|
|
||||||
"email": "sampleaccount3@sampling.com",
|
|
||||||
"arn": "aws::iam:accessKeyDocker:user/Docker",
|
|
||||||
"canonicalID": "sd359df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47eh3hd",
|
|
||||||
"shortid": "123456789012",
|
|
||||||
"keys": [{
|
|
||||||
"access": "accessKeyDocker",
|
|
||||||
"secret": "verySecretKeyDocker"
|
|
||||||
}],
|
|
||||||
"sasToken": "test2"
|
|
||||||
}]
|
}]
|
||||||
}
|
}
|
||||||
|
|
|
@ -2,13 +2,12 @@
|
||||||
"accounts": [{
|
"accounts": [{
|
||||||
"name": "Zenko",
|
"name": "Zenko",
|
||||||
"email": "sampleaccount4@sampling.com",
|
"email": "sampleaccount4@sampling.com",
|
||||||
"arn": "aws::iam:accessKeyZenko:user/Zenko",
|
"arn": "aws::iam:123456789015:root",
|
||||||
"canonicalID": "newCanId",
|
"canonicalID": "newCanId",
|
||||||
"shortid": "123456789012",
|
"shortid": "123456789015",
|
||||||
"keys": [{
|
"keys": [{
|
||||||
"access": "accessKeyZenko",
|
"access": "accessKeyZenko",
|
||||||
"secret": "verySecretKeyZenko"
|
"secret": "verySecretKeyZenko"
|
||||||
}],
|
}]
|
||||||
"sasToken": "test2"
|
|
||||||
}]
|
}]
|
||||||
}
|
}
|
||||||
|
|
|
@ -7,119 +7,131 @@ const constructStringToSign =
|
||||||
const DummyRequestLogger = require('../../helpers').DummyRequestLogger;
|
const DummyRequestLogger = require('../../helpers').DummyRequestLogger;
|
||||||
|
|
||||||
const log = new DummyRequestLogger();
|
const log = new DummyRequestLogger();
|
||||||
|
[
|
||||||
describe('constructStringToSign function', () => {
|
{ path: '', desc: 'constructStringToSign function' },
|
||||||
// Example taken from: http://docs.aws.amazon.com/AmazonS3/
|
{ path: '/_/proxy', desc: 'constructStringToSign function with proxy' },
|
||||||
// latest/API/sig-v4-header-based-auth.html
|
].forEach(item => {
|
||||||
it('should construct a stringToSign in accordance ' +
|
describe(item.desc, () => {
|
||||||
'with AWS rules for a get object request (header auth)', () => {
|
// Example taken from: http://docs.aws.amazon.com/AmazonS3/
|
||||||
const params = {
|
// latest/API/sig-v4-header-based-auth.html
|
||||||
request: {
|
it('should construct a stringToSign in accordance ' +
|
||||||
method: 'GET',
|
'with AWS rules for a get object request (header auth)', () => {
|
||||||
path: '/test.txt',
|
const path = '/test.txt';
|
||||||
headers: {
|
const params = {
|
||||||
'host': 'examplebucket.s3.amazonaws.com',
|
request: {
|
||||||
'x-amz-date': '20130524T000000Z',
|
method: 'GET',
|
||||||
'authorization': 'AWS4-HMAC-SHA256 Credential' +
|
path: `${item.path}${path}`,
|
||||||
'=AKIAIOSFODNN7EXAMPLE/20130524/us-east-1/' +
|
headers: {
|
||||||
's3/aws4_request,SignedHeaders=host;range;' +
|
'host': 'examplebucket.s3.amazonaws.com',
|
||||||
'x-amz-content-sha256;x-amz-date,Signature=' +
|
'x-amz-date': '20130524T000000Z',
|
||||||
'f0e8bdb87c964420e857bd35b5d6ed310bd44f' +
|
'authorization': 'AWS4-HMAC-SHA256 Credential' +
|
||||||
'0170aba48dd91039c6036bdb41',
|
'=AKIAIOSFODNN7EXAMPLE/20130524/us-east-1/' +
|
||||||
'range': 'bytes=0-9',
|
's3/aws4_request,SignedHeaders=host;range;' +
|
||||||
'x-amz-content-sha256': 'e3b0c44298fc1c149afbf4c' +
|
'x-amz-content-sha256;x-amz-date,Signature=' +
|
||||||
'8996fb92427ae41e4649b934ca495991b7852b855',
|
'f0e8bdb87c964420e857bd35b5d6ed310bd44f' +
|
||||||
|
'0170aba48dd91039c6036bdb41',
|
||||||
|
'range': 'bytes=0-9',
|
||||||
|
'x-amz-content-sha256': 'e3b0c44298fc1c149afbf4c' +
|
||||||
|
'8996fb92427ae41e4649b934ca495991b7852b855',
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
query: {},
|
||||||
query: {},
|
signedHeaders: 'host;range;x-amz-content-sha256;x-amz-date',
|
||||||
signedHeaders: 'host;range;x-amz-content-sha256;x-amz-date',
|
payloadChecksum: 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4' +
|
||||||
payloadChecksum: 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4' +
|
'649b934ca495991b7852b855',
|
||||||
'649b934ca495991b7852b855',
|
credentialScope: '20130524/us-east-1/s3/aws4_request',
|
||||||
credentialScope: '20130524/us-east-1/s3/aws4_request',
|
timestamp: '20130524T000000Z',
|
||||||
timestamp: '20130524T000000Z',
|
log,
|
||||||
log,
|
proxyPath: item.path ? path : undefined,
|
||||||
};
|
};
|
||||||
const expectedOutput = 'AWS4-HMAC-SHA256\n' +
|
const expectedOutput = 'AWS4-HMAC-SHA256\n' +
|
||||||
'20130524T000000Z\n' +
|
'20130524T000000Z\n' +
|
||||||
'20130524/us-east-1/s3/aws4_request\n' +
|
'20130524/us-east-1/s3/aws4_request\n' +
|
||||||
'7344ae5b7ee6c3e7e6b0fe0640412a37625d1fbfff95c48bbb2dc43964946972';
|
'7344ae5b7ee6c3e7e6b0fe0640412a37625d1fbfff95c48bbb2dc439649' +
|
||||||
const actualOutput = constructStringToSign(params);
|
'46972';
|
||||||
assert.strictEqual(actualOutput, expectedOutput);
|
const actualOutput = constructStringToSign(params);
|
||||||
});
|
assert.strictEqual(actualOutput, expectedOutput);
|
||||||
|
});
|
||||||
|
|
||||||
// Example taken from: http://docs.aws.amazon.com/AmazonS3/
|
// Example taken from: http://docs.aws.amazon.com/AmazonS3/
|
||||||
// latest/API/sig-v4-header-based-auth.html
|
// latest/API/sig-v4-header-based-auth.html
|
||||||
it('should construct a stringToSign in accordance ' +
|
it('should construct a stringToSign in accordance ' +
|
||||||
'with AWS rules for a put object request (header auth)', () => {
|
'with AWS rules for a put object request (header auth)', () => {
|
||||||
const params = {
|
const path = '/test$file.text';
|
||||||
request: {
|
const params = {
|
||||||
method: 'PUT',
|
request: {
|
||||||
path: '/test$file.text',
|
method: 'PUT',
|
||||||
headers: {
|
path: `${item.path}${path}`,
|
||||||
'date': 'Fri, 24 May 2013 00:00:00 GMT',
|
headers: {
|
||||||
'host': 'examplebucket.s3.amazonaws.com',
|
'date': 'Fri, 24 May 2013 00:00:00 GMT',
|
||||||
'x-amz-date': '20130524T000000Z',
|
'host': 'examplebucket.s3.amazonaws.com',
|
||||||
'authorization': 'AWS4-HMAC-SHA256 Credential' +
|
'x-amz-date': '20130524T000000Z',
|
||||||
'=AKIAIOSFODNN7EXAMPLE/20130524/us-east-1' +
|
'authorization': 'AWS4-HMAC-SHA256 Credential' +
|
||||||
'/s3/aws4_request,SignedHeaders=date;host;' +
|
'=AKIAIOSFODNN7EXAMPLE/20130524/us-east-1' +
|
||||||
'x-amz-content-sha256;x-amz-date;x-amz-storage' +
|
'/s3/aws4_request,SignedHeaders=date;host;' +
|
||||||
'-class,Signature=98ad721746da40c64f1a55b78f14c2' +
|
'x-amz-content-sha256;x-amz-date;x-amz-storage' +
|
||||||
'38d841ea1380cd77a1b5971af0ece108bd',
|
'-class,Signature=98ad721746da40c64f1a55b78f14c2' +
|
||||||
'x-amz-storage-class': 'REDUCED_REDUNDANCY',
|
'38d841ea1380cd77a1b5971af0ece108bd',
|
||||||
'x-amz-content-sha256': '44ce7dd67c959e0d3524ffac1' +
|
'x-amz-storage-class': 'REDUCED_REDUNDANCY',
|
||||||
'771dfbba87d2b6b4b4e99e42034a8b803f8b072',
|
'x-amz-content-sha256': '44ce7dd67c959e0d3524ffac1' +
|
||||||
|
'771dfbba87d2b6b4b4e99e42034a8b803f8b072',
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
query: {},
|
||||||
query: {},
|
signedHeaders: 'date;host;x-amz-content-sha256;' +
|
||||||
signedHeaders: 'date;host;x-amz-content-sha256;' +
|
'x-amz-date;x-amz-storage-class',
|
||||||
'x-amz-date;x-amz-storage-class',
|
payloadChecksum: '44ce7dd67c959e0d3524ffac1771dfbba8' +
|
||||||
payloadChecksum: '44ce7dd67c959e0d3524ffac1771dfbba8' +
|
'7d2b6b4b4e99e42034a8b803f8b072',
|
||||||
'7d2b6b4b4e99e42034a8b803f8b072',
|
credentialScope: '20130524/us-east-1/s3/aws4_request',
|
||||||
credentialScope: '20130524/us-east-1/s3/aws4_request',
|
timestamp: '20130524T000000Z',
|
||||||
timestamp: '20130524T000000Z',
|
log,
|
||||||
log,
|
proxyPath: item.path ? path : undefined,
|
||||||
};
|
};
|
||||||
const expectedOutput = 'AWS4-HMAC-SHA256\n' +
|
const expectedOutput = 'AWS4-HMAC-SHA256\n' +
|
||||||
'20130524T000000Z\n' +
|
'20130524T000000Z\n' +
|
||||||
'20130524/us-east-1/s3/aws4_request\n' +
|
'20130524/us-east-1/s3/aws4_request\n' +
|
||||||
'9e0e90d9c76de8fa5b200d8c849cd5b8dc7a3' +
|
'9e0e90d9c76de8fa5b200d8c849cd5b8dc7a3' +
|
||||||
'be3951ddb7f6a76b4158342019d';
|
'be3951ddb7f6a76b4158342019d';
|
||||||
const actualOutput = constructStringToSign(params);
|
const actualOutput = constructStringToSign(params);
|
||||||
assert.strictEqual(actualOutput, expectedOutput);
|
assert.strictEqual(actualOutput, expectedOutput);
|
||||||
});
|
});
|
||||||
|
|
||||||
// Example taken from: http://docs.aws.amazon.com/AmazonS3/
|
// Example taken from: http://docs.aws.amazon.com/AmazonS3/
|
||||||
// latest/API/sig-v4-header-based-auth.html
|
// latest/API/sig-v4-header-based-auth.html
|
||||||
it('should construct a stringToSign in accordance ' +
|
it('should construct a stringToSign in accordance ' +
|
||||||
'with AWS rules for a pre-signed get url request (query auth)', () => {
|
'with AWS rules for a pre-signed get url request (query auth)',
|
||||||
const params = {
|
() => {
|
||||||
request: {
|
const path = '/test.txt';
|
||||||
method: 'GET',
|
const params = {
|
||||||
path: '/test.txt',
|
request: {
|
||||||
headers: {
|
method: 'GET',
|
||||||
host: 'examplebucket.s3.amazonaws.com',
|
path: `${item.path}${path}`,
|
||||||
},
|
headers: {
|
||||||
},
|
host: 'examplebucket.s3.amazonaws.com',
|
||||||
query: {
|
},
|
||||||
'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
|
},
|
||||||
'X-Amz-Credential': 'AKIAIOSFODNN7EXAMPLE/20130524/' +
|
query: {
|
||||||
'us-east-1/s3/aws4_request',
|
'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
|
||||||
'X-Amz-Date': '20130524T000000Z',
|
'X-Amz-Credential': 'AKIAIOSFODNN7EXAMPLE/20130524/' +
|
||||||
'X-Amz-Expires': '86400',
|
'us-east-1/s3/aws4_request',
|
||||||
'X-Amz-SignedHeaders': 'host',
|
'X-Amz-Date': '20130524T000000Z',
|
||||||
},
|
'X-Amz-Expires': '86400',
|
||||||
signedHeaders: 'host',
|
'X-Amz-SignedHeaders': 'host',
|
||||||
payloadChecksum: 'UNSIGNED-PAYLOAD',
|
},
|
||||||
credentialScope: '20130524/us-east-1/s3/aws4_request',
|
signedHeaders: 'host',
|
||||||
timestamp: '20130524T000000Z',
|
payloadChecksum: 'UNSIGNED-PAYLOAD',
|
||||||
log,
|
credentialScope: '20130524/us-east-1/s3/aws4_request',
|
||||||
};
|
timestamp: '20130524T000000Z',
|
||||||
const expectedOutput = 'AWS4-HMAC-SHA256\n' +
|
log,
|
||||||
'20130524T000000Z\n' +
|
proxyPath: item.path ? path : undefined,
|
||||||
'20130524/us-east-1/s3/aws4_request\n' +
|
};
|
||||||
'3bfa292879f6447bbcda7001decf97f4a54d' +
|
const expectedOutput = 'AWS4-HMAC-SHA256\n' +
|
||||||
'c650c8942174ae0a9121cf58ad04';
|
'20130524T000000Z\n' +
|
||||||
const actualOutput = constructStringToSign(params);
|
'20130524/us-east-1/s3/aws4_request\n' +
|
||||||
assert.strictEqual(actualOutput, expectedOutput);
|
'3bfa292879f6447bbcda7001decf97f4a54d' +
|
||||||
|
'c650c8942174ae0a9121cf58ad04';
|
||||||
|
const actualOutput = constructStringToSign(params);
|
||||||
|
assert.strictEqual(actualOutput, expectedOutput);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
|
@ -111,4 +111,4 @@ class DummyRequestLogger {
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports = { makeid, timeDiff, makeAuthInfo,
|
module.exports = { makeid, timeDiff, makeAuthInfo,
|
||||||
createAlteredRequest, zpad, DummyRequestLogger };
|
createAlteredRequest, zpad, DummyRequestLogger };
|
||||||
|
|
|
@ -11,7 +11,7 @@ describe('Matrix', () => {
|
||||||
password: 'nopassword',
|
password: 'nopassword',
|
||||||
delimiter: [undefined, '/', '', '|', 'abcd'],
|
delimiter: [undefined, '/', '', '|', 'abcd'],
|
||||||
prefix: [undefined, '/validPrefix/ThatIsNot/InTheSet',
|
prefix: [undefined, '/validPrefix/ThatIsNot/InTheSet',
|
||||||
'/validPrefix/ThatIsPresent/InTheTestSet', 'InvalidPrefix'],
|
'/validPrefix/ThatIsPresent/InTheTestSet', 'InvalidPrefix'],
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -58,7 +58,7 @@ describe('Matrix', () => {
|
||||||
++numberOfCallV2;
|
++numberOfCallV2;
|
||||||
done();
|
done();
|
||||||
}, 'should use v2 auth').testSpecialCase(ifParams, (testMatrix,
|
}, 'should use v2 auth').testSpecialCase(ifParams, (testMatrix,
|
||||||
done) => {
|
done) => {
|
||||||
assert.equal(testMatrix.params.auth === 'v4', true);
|
assert.equal(testMatrix.params.auth === 'v4', true);
|
||||||
++numberOfCallV4;
|
++numberOfCallV4;
|
||||||
done();
|
done();
|
||||||
|
|
|
@ -0,0 +1,78 @@
|
||||||
|
'use strict'; // eslint-disable-line strict
|
||||||
|
|
||||||
|
const assert = require('assert');
|
||||||
|
|
||||||
|
const RedisClient = require('../../../lib/metrics/RedisClient');
|
||||||
|
const StatsClient = require('../../../lib/metrics/StatsClient');
|
||||||
|
|
||||||
|
// setup redis client
|
||||||
|
const config = {
|
||||||
|
host: '127.0.0.1',
|
||||||
|
port: 6379,
|
||||||
|
enableOfflineQueue: false,
|
||||||
|
};
|
||||||
|
const fakeLogger = {
|
||||||
|
trace: () => {},
|
||||||
|
error: () => {},
|
||||||
|
};
|
||||||
|
const redisClient = new RedisClient(config, fakeLogger);
|
||||||
|
|
||||||
|
// setup stats client
|
||||||
|
const STATS_INTERVAL = 5; // 5 seconds
|
||||||
|
const STATS_EXPIRY = 30; // 30 seconds
|
||||||
|
const statsClient = new StatsClient(redisClient, STATS_INTERVAL, STATS_EXPIRY);
|
||||||
|
|
||||||
|
describe('StatsClient class', () => {
|
||||||
|
const id = 'arsenal-test';
|
||||||
|
|
||||||
|
afterEach(() => redisClient.clear(() => {}));
|
||||||
|
|
||||||
|
it('should correctly record a new request', () => {
|
||||||
|
statsClient.reportNewRequest(id, (err, res) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert(Array.isArray(res));
|
||||||
|
assert.equal(res.length, 2);
|
||||||
|
|
||||||
|
const expected = [[null, 1], [null, 1]];
|
||||||
|
assert.deepEqual(res, expected);
|
||||||
|
});
|
||||||
|
|
||||||
|
statsClient.reportNewRequest(id, (err, res) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert(Array.isArray(res));
|
||||||
|
assert.equal(res.length, 2);
|
||||||
|
|
||||||
|
const expected = [[null, 2], [null, 1]];
|
||||||
|
assert.deepEqual(res, expected);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should correctly record a 500 on the server', () => {
|
||||||
|
statsClient.report500(id, (err, res) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert(Array.isArray(res));
|
||||||
|
assert.equal(res.length, 2);
|
||||||
|
|
||||||
|
const expected = [[null, 1], [null, 1]];
|
||||||
|
assert.deepEqual(res, expected);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should respond back with total requests', () => {
|
||||||
|
statsClient.reportNewRequest(id, err => {
|
||||||
|
assert.ifError(err);
|
||||||
|
});
|
||||||
|
statsClient.report500(id, err => {
|
||||||
|
assert.ifError(err);
|
||||||
|
});
|
||||||
|
statsClient.getStats(fakeLogger, id, (err, res) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.equal(typeof res, 'object');
|
||||||
|
assert.equal(Object.keys(res).length, 3);
|
||||||
|
assert.equal(res.sampleDuration, STATS_EXPIRY);
|
||||||
|
|
||||||
|
const expected = { 'requests': 1, '500s': 1, 'sampleDuration': 30 };
|
||||||
|
assert.deepEqual(res, expected);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
|
@ -0,0 +1,136 @@
|
||||||
|
const assert = require('assert');
|
||||||
|
|
||||||
|
const ARN = require('../../../lib/models/ARN');
|
||||||
|
|
||||||
|
describe('ARN object model', () => {
|
||||||
|
describe('valid ARNs', () => {
|
||||||
|
[{ arn: 'arn:aws:iam::123456789012:role/backbeat',
|
||||||
|
service: 'iam',
|
||||||
|
accountId: '123456789012',
|
||||||
|
resource: 'role/backbeat',
|
||||||
|
isIAMAccount: false,
|
||||||
|
isIAMUser: false,
|
||||||
|
isIAMRole: true,
|
||||||
|
},
|
||||||
|
{ arn: 'arn:aws:iam::*:role/backbeat',
|
||||||
|
service: 'iam',
|
||||||
|
accountId: '*',
|
||||||
|
resource: 'role/backbeat',
|
||||||
|
isIAMAccount: false,
|
||||||
|
isIAMUser: false,
|
||||||
|
isIAMRole: true,
|
||||||
|
},
|
||||||
|
{ arn: 'arn:aws:iam:::role/backbeat',
|
||||||
|
service: 'iam',
|
||||||
|
accountId: null,
|
||||||
|
resource: 'role/backbeat',
|
||||||
|
isIAMAccount: false,
|
||||||
|
isIAMUser: false,
|
||||||
|
isIAMRole: false, // not a valid role without an account ID
|
||||||
|
},
|
||||||
|
{ arn: 'arn:aws:iam::123456789012:user/bart',
|
||||||
|
service: 'iam',
|
||||||
|
accountId: '123456789012',
|
||||||
|
resource: 'user/bart',
|
||||||
|
isIAMAccount: false,
|
||||||
|
isIAMUser: true,
|
||||||
|
isIAMRole: false,
|
||||||
|
},
|
||||||
|
{ arn: 'arn:aws:iam:::user/bart',
|
||||||
|
service: 'iam',
|
||||||
|
accountId: null,
|
||||||
|
resource: 'user/bart',
|
||||||
|
isIAMAccount: false,
|
||||||
|
isIAMUser: false, // not a valid user without an account ID
|
||||||
|
isIAMRole: false,
|
||||||
|
},
|
||||||
|
{ arn: 'arn:aws:iam::123456789012:root',
|
||||||
|
service: 'iam',
|
||||||
|
accountId: '123456789012',
|
||||||
|
resource: 'root',
|
||||||
|
isIAMAccount: true,
|
||||||
|
isIAMUser: false,
|
||||||
|
isIAMRole: false,
|
||||||
|
},
|
||||||
|
{ arn: 'arn:aws:iam:::root',
|
||||||
|
service: 'iam',
|
||||||
|
accountId: null,
|
||||||
|
resource: 'root',
|
||||||
|
isIAMAccount: false, // not a valid account without an account ID
|
||||||
|
isIAMUser: false,
|
||||||
|
isIAMRole: false,
|
||||||
|
},
|
||||||
|
{ arn: 'arn:aws:s3::123456789012:foo/bar/baz/qux',
|
||||||
|
service: 's3',
|
||||||
|
accountId: '123456789012',
|
||||||
|
resource: 'foo/bar/baz/qux',
|
||||||
|
isIAMAccount: false,
|
||||||
|
isIAMUser: false,
|
||||||
|
isIAMRole: false,
|
||||||
|
},
|
||||||
|
{ arn: 'arn:aws:s3::123456789012:foo:bar/baz/qux',
|
||||||
|
service: 's3',
|
||||||
|
accountId: '123456789012',
|
||||||
|
resource: 'foo:bar/baz/qux',
|
||||||
|
isIAMAccount: false,
|
||||||
|
isIAMUser: false,
|
||||||
|
isIAMRole: false,
|
||||||
|
},
|
||||||
|
{ arn: 'arn:aws:sts::123456789012:foobar',
|
||||||
|
service: 'sts',
|
||||||
|
accountId: '123456789012',
|
||||||
|
resource: 'foobar',
|
||||||
|
isIAMAccount: false,
|
||||||
|
isIAMUser: false,
|
||||||
|
isIAMRole: false,
|
||||||
|
},
|
||||||
|
{ arn: 'arn:aws:ring::123456789012:foobar',
|
||||||
|
service: 'ring',
|
||||||
|
accountId: '123456789012',
|
||||||
|
resource: 'foobar',
|
||||||
|
isIAMAccount: false,
|
||||||
|
isIAMUser: false,
|
||||||
|
isIAMRole: false,
|
||||||
|
},
|
||||||
|
{ arn: 'arn:scality:utapi::123456789012:foobar',
|
||||||
|
service: 'utapi',
|
||||||
|
accountId: '123456789012',
|
||||||
|
resource: 'foobar',
|
||||||
|
isIAMAccount: false,
|
||||||
|
isIAMUser: false,
|
||||||
|
isIAMRole: false,
|
||||||
|
},
|
||||||
|
{ arn: 'arn:scality:sso::123456789012:foobar',
|
||||||
|
service: 'sso',
|
||||||
|
accountId: '123456789012',
|
||||||
|
resource: 'foobar',
|
||||||
|
isIAMAccount: false,
|
||||||
|
isIAMUser: false,
|
||||||
|
isIAMRole: false,
|
||||||
|
},
|
||||||
|
].forEach(arnTest => it(`should accept ARN "${arnTest.arn}"`, () => {
|
||||||
|
const arnObj = ARN.createFromString(arnTest.arn);
|
||||||
|
assert(arnObj instanceof ARN);
|
||||||
|
assert.strictEqual(arnObj.getService(), arnTest.service);
|
||||||
|
assert.strictEqual(arnObj.getAccountId(), arnTest.accountId);
|
||||||
|
assert.strictEqual(arnObj.getResource(), arnTest.resource);
|
||||||
|
assert.strictEqual(arnObj.isIAMAccount(), arnTest.isIAMAccount);
|
||||||
|
assert.strictEqual(arnObj.isIAMUser(), arnTest.isIAMUser);
|
||||||
|
assert.strictEqual(arnObj.isIAMRole(), arnTest.isIAMRole);
|
||||||
|
assert.strictEqual(arnObj.toString(), arnTest.arn);
|
||||||
|
}));
|
||||||
|
});
|
||||||
|
describe('bad ARNs', () => {
|
||||||
|
['',
|
||||||
|
':',
|
||||||
|
'foo:',
|
||||||
|
'arn::iam::123456789012:role/backbeat',
|
||||||
|
'arn:aws:xxx::123456789012:role/backbeat',
|
||||||
|
'arn:aws:s3::123456789012345:role/backbeat',
|
||||||
|
'arn:aws:s3::12345678901b:role/backbeat',
|
||||||
|
].forEach(arn => it(`should fail with invalid ARN "${arn}"`, () => {
|
||||||
|
const res = ARN.createFromString(arn);
|
||||||
|
assert.notStrictEqual(res.error, undefined);
|
||||||
|
}));
|
||||||
|
});
|
||||||
|
});
|
|
@ -1,5 +1,6 @@
|
||||||
const assert = require('assert');
|
const assert = require('assert');
|
||||||
const ObjectMD = require('../../../lib/models/ObjectMD');
|
const ObjectMD = require('../../../lib/models/ObjectMD');
|
||||||
|
const constants = require('../../../lib/constants');
|
||||||
|
|
||||||
describe('ObjectMD class setters/getters', () => {
|
describe('ObjectMD class setters/getters', () => {
|
||||||
let md = null;
|
let md = null;
|
||||||
|
@ -10,7 +11,6 @@ describe('ObjectMD class setters/getters', () => {
|
||||||
|
|
||||||
[
|
[
|
||||||
// In order: data property, value to set/get, default value
|
// In order: data property, value to set/get, default value
|
||||||
['ModelVersion', null, 3],
|
|
||||||
['OwnerDisplayName', null, ''],
|
['OwnerDisplayName', null, ''],
|
||||||
['OwnerDisplayName', 'owner-display-name'],
|
['OwnerDisplayName', 'owner-display-name'],
|
||||||
['OwnerId', null, ''],
|
['OwnerId', null, ''],
|
||||||
|
@ -79,6 +79,8 @@ describe('ObjectMD class setters/getters', () => {
|
||||||
destination: '',
|
destination: '',
|
||||||
storageClass: '',
|
storageClass: '',
|
||||||
role: '',
|
role: '',
|
||||||
|
storageType: '',
|
||||||
|
dataStoreVersionId: '',
|
||||||
}],
|
}],
|
||||||
['ReplicationInfo', {
|
['ReplicationInfo', {
|
||||||
status: 'PENDING',
|
status: 'PENDING',
|
||||||
|
@ -87,6 +89,8 @@ describe('ObjectMD class setters/getters', () => {
|
||||||
storageClass: 'STANDARD',
|
storageClass: 'STANDARD',
|
||||||
role: 'arn:aws:iam::account-id:role/src-resource,' +
|
role: 'arn:aws:iam::account-id:role/src-resource,' +
|
||||||
'arn:aws:iam::account-id:role/dest-resource',
|
'arn:aws:iam::account-id:role/dest-resource',
|
||||||
|
storageType: 'aws_s3',
|
||||||
|
dataStoreVersionId: 'QWY1QQwWn9xJcoz0EgJjJ_t8g4nMYsxo',
|
||||||
}],
|
}],
|
||||||
['DataStoreName', null, ''],
|
['DataStoreName', null, ''],
|
||||||
].forEach(test => {
|
].forEach(test => {
|
||||||
|
@ -110,3 +114,91 @@ describe('ObjectMD class setters/getters', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe('ObjectMD import from stored blob', () => {
|
||||||
|
it('should export and import correctly the latest model version', () => {
|
||||||
|
const md = new ObjectMD();
|
||||||
|
const jsonMd = md.getSerialized();
|
||||||
|
const importedRes = ObjectMD.createFromBlob(jsonMd);
|
||||||
|
assert.ifError(importedRes.error);
|
||||||
|
const importedMd = importedRes.result;
|
||||||
|
assert.deepStrictEqual(md, importedMd);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should convert old location to new location', () => {
|
||||||
|
const md = new ObjectMD();
|
||||||
|
const value = md.getValue();
|
||||||
|
value['md-model-version'] = 1;
|
||||||
|
value.location = 'stringLocation';
|
||||||
|
const jsonMd = JSON.stringify(value);
|
||||||
|
const importedRes = ObjectMD.createFromBlob(jsonMd);
|
||||||
|
assert.strictEqual(importedRes.error, undefined);
|
||||||
|
const importedMd = importedRes.result;
|
||||||
|
const valueImported = importedMd.getValue();
|
||||||
|
assert.strictEqual(valueImported['md-model-version'],
|
||||||
|
constants.mdModelVersion);
|
||||||
|
assert.deepStrictEqual(valueImported.location,
|
||||||
|
[{ key: 'stringLocation' }]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should keep null location as is', () => {
|
||||||
|
const md = new ObjectMD();
|
||||||
|
const value = md.getValue();
|
||||||
|
value.location = null;
|
||||||
|
const jsonMd = JSON.stringify(value);
|
||||||
|
const importedRes = ObjectMD.createFromBlob(jsonMd);
|
||||||
|
assert.strictEqual(importedRes.error, undefined);
|
||||||
|
const importedMd = importedRes.result;
|
||||||
|
const valueImported = importedMd.getValue();
|
||||||
|
assert.deepStrictEqual(valueImported.location, null);
|
||||||
|
importedMd.setLocation([]);
|
||||||
|
assert.deepStrictEqual(importedMd.getValue().location, null);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should add dataStoreName attribute if missing', () => {
|
||||||
|
const md = new ObjectMD();
|
||||||
|
const value = md.getValue();
|
||||||
|
value['md-model-version'] = 2;
|
||||||
|
delete value.dataStoreName;
|
||||||
|
const jsonMd = JSON.stringify(value);
|
||||||
|
const importedRes = ObjectMD.createFromBlob(jsonMd);
|
||||||
|
assert.strictEqual(importedRes.error, undefined);
|
||||||
|
const importedMd = importedRes.result;
|
||||||
|
const valueImported = importedMd.getValue();
|
||||||
|
assert.strictEqual(valueImported['md-model-version'],
|
||||||
|
constants.mdModelVersion);
|
||||||
|
assert.notStrictEqual(valueImported.dataStoreName, undefined);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return undefined for dataStoreVersionId if no object location',
|
||||||
|
() => {
|
||||||
|
const md = new ObjectMD();
|
||||||
|
const value = md.getValue();
|
||||||
|
const jsonMd = JSON.stringify(value);
|
||||||
|
const importedRes = ObjectMD.createFromBlob(jsonMd);
|
||||||
|
assert.strictEqual(importedRes.error, undefined);
|
||||||
|
const importedMd = importedRes.result;
|
||||||
|
assert.strictEqual(importedMd.getDataStoreVersionId(), undefined);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should get dataStoreVersionId if saved in object location', () => {
|
||||||
|
const md = new ObjectMD();
|
||||||
|
const dummyLocation = {
|
||||||
|
dataStoreVersionId: 'data-store-version-id',
|
||||||
|
};
|
||||||
|
md.setLocation([dummyLocation]);
|
||||||
|
const value = md.getValue();
|
||||||
|
const jsonMd = JSON.stringify(value);
|
||||||
|
const importedRes = ObjectMD.createFromBlob(jsonMd);
|
||||||
|
assert.strictEqual(importedRes.error, undefined);
|
||||||
|
const importedMd = importedRes.result;
|
||||||
|
assert.strictEqual(importedMd.getDataStoreVersionId(),
|
||||||
|
dummyLocation.dataStoreVersionId);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return an error if blob is malformed JSON', () => {
|
||||||
|
const importedRes = ObjectMD.createFromBlob('{BAD JSON}');
|
||||||
|
assert.notStrictEqual(importedRes.error, undefined);
|
||||||
|
assert.strictEqual(importedRes.result, undefined);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
|
@ -9,16 +9,17 @@ describe('round robin hosts', () => {
|
||||||
caption: 'with { host, port } objects in list',
|
caption: 'with { host, port } objects in list',
|
||||||
hostsList: [{ host: '1.2.3.0', port: 1000 },
|
hostsList: [{ host: '1.2.3.0', port: 1000 },
|
||||||
{ host: '1.2.3.1', port: 1001 },
|
{ host: '1.2.3.1', port: 1001 },
|
||||||
{ host: '1.2.3.2', port: 1002 }],
|
{ host: '1.2.3.2' }],
|
||||||
}, {
|
}, {
|
||||||
caption: 'with "host:port" strings in list',
|
caption: 'with "host:port" strings in list',
|
||||||
hostsList: ['1.2.3.0:1000',
|
hostsList: ['1.2.3.0:1000',
|
||||||
'1.2.3.1:1001',
|
'1.2.3.1:1001',
|
||||||
'1.2.3.2'],
|
'1.2.3.2'],
|
||||||
}].forEach(testCase => describe(testCase.caption, () => {
|
}].forEach(testCase => describe(testCase.caption, () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
roundRobin = new RoundRobin(testCase.hostsList,
|
roundRobin = new RoundRobin(testCase.hostsList,
|
||||||
{ stickyCount: 10 });
|
{ stickyCount: 10,
|
||||||
|
defaultPort: 1002 });
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should pick all hosts in turn', () => {
|
it('should pick all hosts in turn', () => {
|
||||||
|
@ -31,8 +32,7 @@ describe('round robin hosts', () => {
|
||||||
// expect 3 loops of 10 times each of the 3 hosts
|
// expect 3 loops of 10 times each of the 3 hosts
|
||||||
for (let i = 0; i < 90; ++i) {
|
for (let i = 0; i < 90; ++i) {
|
||||||
const hostItem = roundRobin.pickHost();
|
const hostItem = roundRobin.pickHost();
|
||||||
hostsPickCount[hostItem.host] =
|
hostsPickCount[hostItem.host] += 1;
|
||||||
hostsPickCount[hostItem.host] + 1;
|
|
||||||
}
|
}
|
||||||
assert.strictEqual(hostsPickCount['1.2.3.0'], 30);
|
assert.strictEqual(hostsPickCount['1.2.3.0'], 30);
|
||||||
assert.strictEqual(hostsPickCount['1.2.3.1'], 30);
|
assert.strictEqual(hostsPickCount['1.2.3.1'], 30);
|
||||||
|
@ -51,8 +51,7 @@ describe('round robin hosts', () => {
|
||||||
const curHost = roundRobin.getCurrentHost();
|
const curHost = roundRobin.getCurrentHost();
|
||||||
for (let i = 0; i < 10; ++i) {
|
for (let i = 0; i < 10; ++i) {
|
||||||
const hostItem = roundRobin.pickHost();
|
const hostItem = roundRobin.pickHost();
|
||||||
hostsPickCount[hostItem.host] =
|
hostsPickCount[hostItem.host] += 1;
|
||||||
hostsPickCount[hostItem.host] + 1;
|
|
||||||
}
|
}
|
||||||
assert.strictEqual(hostsPickCount[curHost.host], 10);
|
assert.strictEqual(hostsPickCount[curHost.host], 10);
|
||||||
});
|
});
|
||||||
|
@ -67,8 +66,7 @@ describe('round robin hosts', () => {
|
||||||
// expect each host to be picked up 3 times
|
// expect each host to be picked up 3 times
|
||||||
for (let i = 0; i < 9; ++i) {
|
for (let i = 0; i < 9; ++i) {
|
||||||
const hostItem = roundRobin.pickNextHost();
|
const hostItem = roundRobin.pickNextHost();
|
||||||
hostsPickCount[hostItem.host] =
|
hostsPickCount[hostItem.host] += 1;
|
||||||
hostsPickCount[hostItem.host] + 1;
|
|
||||||
}
|
}
|
||||||
assert.strictEqual(hostsPickCount['1.2.3.0'], 3);
|
assert.strictEqual(hostsPickCount['1.2.3.0'], 3);
|
||||||
assert.strictEqual(hostsPickCount['1.2.3.1'], 3);
|
assert.strictEqual(hostsPickCount['1.2.3.1'], 3);
|
||||||
|
@ -101,5 +99,18 @@ describe('round robin hosts', () => {
|
||||||
// eslint-disable-next-line no-new
|
// eslint-disable-next-line no-new
|
||||||
new RoundRobin(['zenko.io', 'zenka.ia']);
|
new RoundRobin(['zenko.io', 'zenka.ia']);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should have set default port if not in bootstrap list', () => {
|
||||||
|
// the current host should be picked 10 times in a row
|
||||||
|
const portMap = {
|
||||||
|
'1.2.3.0': 1000,
|
||||||
|
'1.2.3.1': 1001,
|
||||||
|
'1.2.3.2': 1002,
|
||||||
|
};
|
||||||
|
for (let i = 0; i < 100; ++i) {
|
||||||
|
const hostItem = roundRobin.pickHost();
|
||||||
|
assert.strictEqual(hostItem.port, portMap[hostItem.host]);
|
||||||
|
}
|
||||||
|
});
|
||||||
}));
|
}));
|
||||||
});
|
});
|
||||||
|
|
|
@ -16,13 +16,13 @@ function checkParseRange(rangeHeader, totalLength, expectedRange) {
|
||||||
|
|
||||||
describe('parseRangeSpec function', () => {
|
describe('parseRangeSpec function', () => {
|
||||||
[{ rangeHeader: 'bytes=1000-2000',
|
[{ rangeHeader: 'bytes=1000-2000',
|
||||||
expectedRangeSpec: { start: 1000, end: 2000 } },
|
expectedRangeSpec: { start: 1000, end: 2000 } },
|
||||||
{ rangeHeader: 'bytes=1000-',
|
{ rangeHeader: 'bytes=1000-',
|
||||||
expectedRangeSpec: { start: 1000 } },
|
expectedRangeSpec: { start: 1000 } },
|
||||||
{ rangeHeader: 'bytes=-',
|
{ rangeHeader: 'bytes=-',
|
||||||
expectedRangeSpec: { error: errors.InvalidArgument } },
|
expectedRangeSpec: { error: errors.InvalidArgument } },
|
||||||
{ rangeHeader: 'bytes=10-9',
|
{ rangeHeader: 'bytes=10-9',
|
||||||
expectedRangeSpec: { error: errors.InvalidArgument } },
|
expectedRangeSpec: { error: errors.InvalidArgument } },
|
||||||
].forEach(testCase => {
|
].forEach(testCase => {
|
||||||
const { rangeHeader, expectedRangeSpec } = testCase;
|
const { rangeHeader, expectedRangeSpec } = testCase;
|
||||||
|
|
||||||
|
@ -45,25 +45,25 @@ describe('parseRangeSpec function', () => {
|
||||||
|
|
||||||
describe('getByteRangeFromSpec function', () => {
|
describe('getByteRangeFromSpec function', () => {
|
||||||
[{ rangeSpec: { start: 1000, end: 2000 }, objectSize: 3000,
|
[{ rangeSpec: { start: 1000, end: 2000 }, objectSize: 3000,
|
||||||
expectedByteRange: { range: [1000, 2000] } },
|
expectedByteRange: { range: [1000, 2000] } },
|
||||||
{ rangeSpec: { start: 1000, end: 5000 }, objectSize: 3000,
|
{ rangeSpec: { start: 1000, end: 5000 }, objectSize: 3000,
|
||||||
expectedByteRange: { range: [1000, 2999] } },
|
expectedByteRange: { range: [1000, 2999] } },
|
||||||
{ rangeSpec: { start: 1000 }, objectSize: 3000,
|
{ rangeSpec: { start: 1000 }, objectSize: 3000,
|
||||||
expectedByteRange: { range: [1000, 2999] } },
|
expectedByteRange: { range: [1000, 2999] } },
|
||||||
{ rangeSpec: { suffix: 1000 }, objectSize: 3000,
|
{ rangeSpec: { suffix: 1000 }, objectSize: 3000,
|
||||||
expectedByteRange: { range: [2000, 2999] } },
|
expectedByteRange: { range: [2000, 2999] } },
|
||||||
{ rangeSpec: { suffix: 4000 }, objectSize: 3000,
|
{ rangeSpec: { suffix: 4000 }, objectSize: 3000,
|
||||||
expectedByteRange: { range: [0, 2999] } },
|
expectedByteRange: { range: [0, 2999] } },
|
||||||
{ rangeSpec: { start: 2999 }, objectSize: 3000,
|
{ rangeSpec: { start: 2999 }, objectSize: 3000,
|
||||||
expectedByteRange: { range: [2999, 2999] } },
|
expectedByteRange: { range: [2999, 2999] } },
|
||||||
{ rangeSpec: { start: 3000 }, objectSize: 3000,
|
{ rangeSpec: { start: 3000 }, objectSize: 3000,
|
||||||
expectedByteRange: { error: errors.InvalidRange } },
|
expectedByteRange: { error: errors.InvalidRange } },
|
||||||
{ rangeSpec: { start: 0, end: 10 }, objectSize: 0,
|
{ rangeSpec: { start: 0, end: 10 }, objectSize: 0,
|
||||||
expectedByteRange: { error: errors.InvalidRange } },
|
expectedByteRange: { error: errors.InvalidRange } },
|
||||||
{ rangeSpec: { suffix: 10 }, objectSize: 0,
|
{ rangeSpec: { suffix: 10 }, objectSize: 0,
|
||||||
expectedByteRange: { } },
|
expectedByteRange: { } },
|
||||||
{ rangeSpec: { suffix: 0 }, objectSize: 0,
|
{ rangeSpec: { suffix: 0 }, objectSize: 0,
|
||||||
expectedByteRange: { error: errors.InvalidRange } },
|
expectedByteRange: { error: errors.InvalidRange } },
|
||||||
].forEach(testCase => {
|
].forEach(testCase => {
|
||||||
const { rangeSpec, objectSize, expectedByteRange } = testCase;
|
const { rangeSpec, objectSize, expectedByteRange } = testCase;
|
||||||
|
|
||||||
|
|
|
@ -34,20 +34,20 @@ describe('REST interface for blob data storage', () => {
|
||||||
function setup(done) {
|
function setup(done) {
|
||||||
temp.mkdir('test-REST-data-dir', (err, tempDir) => {
|
temp.mkdir('test-REST-data-dir', (err, tempDir) => {
|
||||||
dataStore = new DataFileStore({ dataPath: tempDir,
|
dataStore = new DataFileStore({ dataPath: tempDir,
|
||||||
noSync: true,
|
noSync: true,
|
||||||
logApi: clientLogApi,
|
logApi: clientLogApi,
|
||||||
});
|
});
|
||||||
server = new RESTServer({ port: 6677,
|
server = new RESTServer({ port: 6677,
|
||||||
dataStore,
|
dataStore,
|
||||||
log: { logLevel: 'info',
|
log: { logLevel: 'info',
|
||||||
dumpLevel: 'error' },
|
dumpLevel: 'error' },
|
||||||
});
|
});
|
||||||
server.setup(() => {
|
server.setup(() => {
|
||||||
server.start();
|
server.start();
|
||||||
client = new RESTClient({ host: 'localhost',
|
client = new RESTClient({ host: 'localhost',
|
||||||
port: 6677,
|
port: 6677,
|
||||||
logApi: clientLogApi,
|
logApi: clientLogApi,
|
||||||
});
|
});
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -150,20 +150,20 @@ describe('REST interface for blob data storage', () => {
|
||||||
// successful range queries
|
// successful range queries
|
||||||
|
|
||||||
[{ range: [10, 20],
|
[{ range: [10, 20],
|
||||||
sliceArgs: [10, 21], contentRange: [10, 20] },
|
sliceArgs: [10, 21], contentRange: [10, 20] },
|
||||||
{ range: [10, undefined],
|
{ range: [10, undefined],
|
||||||
sliceArgs: [10], contentRange: [10, contents.length - 1] },
|
sliceArgs: [10], contentRange: [10, contents.length - 1] },
|
||||||
{ range: [10, 1000],
|
{ range: [10, 1000],
|
||||||
sliceArgs: [10], contentRange: [10, contents.length - 1] },
|
sliceArgs: [10], contentRange: [10, contents.length - 1] },
|
||||||
{ range: [undefined, 10],
|
{ range: [undefined, 10],
|
||||||
sliceArgs: [-10], contentRange: [contents.length - 10,
|
sliceArgs: [-10], contentRange: [contents.length - 10,
|
||||||
contents.length - 1] },
|
contents.length - 1] },
|
||||||
{ range: [undefined, contents.length + 2],
|
{ range: [undefined, contents.length + 2],
|
||||||
sliceArgs: [-(contents.length + 2)],
|
sliceArgs: [-(contents.length + 2)],
|
||||||
contentRange: [0, contents.length - 1] },
|
contentRange: [0, contents.length - 1] },
|
||||||
{ range: [contents.length - 1, undefined],
|
{ range: [contents.length - 1, undefined],
|
||||||
sliceArgs: [-1], contentRange: [contents.length - 1,
|
sliceArgs: [-1], contentRange: [contents.length - 1,
|
||||||
contents.length - 1] }]
|
contents.length - 1] }]
|
||||||
.forEach((test, i) => {
|
.forEach((test, i) => {
|
||||||
const { range, sliceArgs, contentRange } = test;
|
const { range, sliceArgs, contentRange } = test;
|
||||||
it(`should get the correct range ${range[0]}-${range[1]}`,
|
it(`should get the correct range ${range[0]}-${range[1]}`,
|
||||||
|
@ -175,7 +175,7 @@ describe('REST interface for blob data storage', () => {
|
||||||
const value = resp.read();
|
const value = resp.read();
|
||||||
assert.strictEqual(
|
assert.strictEqual(
|
||||||
value.toString(),
|
value.toString(),
|
||||||
contents.slice.apply(contents, sliceArgs));
|
contents.slice(...sliceArgs));
|
||||||
checkContentRange(resp, contentRange[0],
|
checkContentRange(resp, contentRange[0],
|
||||||
contentRange[1]);
|
contentRange[1]);
|
||||||
done();
|
done();
|
||||||
|
|
|
@ -16,21 +16,21 @@ const levelNet = require('../../../../lib/network/rpc/level-net');
|
||||||
// simply forward the API calls to the db as-is
|
// simply forward the API calls to the db as-is
|
||||||
const dbAsyncAPI = {
|
const dbAsyncAPI = {
|
||||||
put: (env, ...args) => {
|
put: (env, ...args) => {
|
||||||
env.subDb.put.apply(env.subDb, args);
|
env.subDb.put(...args);
|
||||||
},
|
},
|
||||||
del: (env, ...args) => {
|
del: (env, ...args) => {
|
||||||
env.subDb.del.apply(env.subDb, args);
|
env.subDb.del(...args);
|
||||||
},
|
},
|
||||||
get: (env, ...args) => {
|
get: (env, ...args) => {
|
||||||
env.subDb.get.apply(env.subDb, args);
|
env.subDb.get(...args);
|
||||||
},
|
},
|
||||||
batch: (env, ...args) => {
|
batch: (env, ...args) => {
|
||||||
env.subDb.batch.apply(env.subDb, args);
|
env.subDb.batch(...args);
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
const dbSyncAPI = {
|
const dbSyncAPI = {
|
||||||
createReadStream:
|
createReadStream:
|
||||||
(env, ...args) => env.subDb.createReadStream.apply(env.subDb, args),
|
(env, ...args) => env.subDb.createReadStream(args),
|
||||||
};
|
};
|
||||||
|
|
||||||
describe('level-net - LevelDB over network', () => {
|
describe('level-net - LevelDB over network', () => {
|
||||||
|
|
|
@ -0,0 +1,647 @@
|
||||||
|
const assert = require('assert');
|
||||||
|
const Principal = require('../../../lib/policyEvaluator/principal');
|
||||||
|
const RequestContext = require('../../../lib/policyEvaluator/RequestContext');
|
||||||
|
|
||||||
|
const defaultAccountId = '123456789012';
|
||||||
|
const anotherAccountId = '098765432112';
|
||||||
|
const defaultAccountArn = `arn:aws:iam::${defaultAccountId}:root`;
|
||||||
|
const defaultUserArn = `arn:aws:iam::${defaultAccountId}:user/test`;
|
||||||
|
const defaultRole = `arn:aws:iam::${defaultAccountId}:role/role1`;
|
||||||
|
const defaultAssumedRole =
|
||||||
|
`arn:aws:sts::${defaultAccountId}:assumed-role/role1/session`;
|
||||||
|
const defaultSamlProvider =
|
||||||
|
`arn:aws:iam::${defaultAccountId}:saml-provider/provider1`;
|
||||||
|
const defaultFederatedUser =
|
||||||
|
`arn:aws:sts::${defaultAccountId}:federated-user/foo`;
|
||||||
|
const anotherAccountArn = `arn:aws:iam::${anotherAccountId}:root`;
|
||||||
|
const anotherUserArn = `arn:aws:iam::${anotherAccountId}:user/test`;
|
||||||
|
const defaultValids = {
|
||||||
|
AWS: [
|
||||||
|
defaultAccountId,
|
||||||
|
defaultAccountArn,
|
||||||
|
],
|
||||||
|
};
|
||||||
|
|
||||||
|
const defaultParams = {
|
||||||
|
log: {
|
||||||
|
trace: () => {},
|
||||||
|
debug: () => {},
|
||||||
|
info: () => {},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
describe('Principal evaluator', () => {
|
||||||
|
[
|
||||||
|
{
|
||||||
|
name: 'anonymous as Principal (effect Allow) -> grant access',
|
||||||
|
statement: {
|
||||||
|
Principal: '*',
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
valids: defaultValids,
|
||||||
|
result: 'Allow',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'anonymous as Principal (effect Deny) -> deny access',
|
||||||
|
statement: {
|
||||||
|
Principal: '*',
|
||||||
|
Effect: 'Deny',
|
||||||
|
},
|
||||||
|
valids: defaultValids,
|
||||||
|
result: 'Deny',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'account (arn) in Principal (effect Allow) -> grant access',
|
||||||
|
statement: {
|
||||||
|
Principal: {
|
||||||
|
AWS: defaultAccountArn,
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
valids: defaultValids,
|
||||||
|
result: 'Allow',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'account (arn) in Principal (effect Deny) -> deny access',
|
||||||
|
statement: {
|
||||||
|
Principal: {
|
||||||
|
AWS: [defaultAccountArn],
|
||||||
|
},
|
||||||
|
Effect: 'Deny',
|
||||||
|
},
|
||||||
|
valids: defaultValids,
|
||||||
|
result: 'Deny',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'account (id) in Principal (effect Allow) -> grant access',
|
||||||
|
statement: {
|
||||||
|
Principal: {
|
||||||
|
AWS: defaultAccountId,
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
valids: defaultValids,
|
||||||
|
result: 'Allow',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'account (id) as Principal (effect Deny) -> deny access',
|
||||||
|
statement: {
|
||||||
|
Principal: {
|
||||||
|
AWS: defaultAccountId,
|
||||||
|
},
|
||||||
|
Effect: 'Deny',
|
||||||
|
},
|
||||||
|
valids: defaultValids,
|
||||||
|
result: 'Deny',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'account not in Principal (effect Allow) -> neutral',
|
||||||
|
statement: {
|
||||||
|
Principal: {
|
||||||
|
AWS: [anotherAccountId],
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
valids: defaultValids,
|
||||||
|
result: 'Neutral',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'account not in Principal (effect Deny) -> neutral',
|
||||||
|
statement: {
|
||||||
|
Principal: {
|
||||||
|
AWS: [anotherAccountId],
|
||||||
|
},
|
||||||
|
Effect: 'Deny',
|
||||||
|
},
|
||||||
|
valids: defaultValids,
|
||||||
|
result: 'Neutral',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name:
|
||||||
|
'multiple account as Principal (effect Allow) -> grant access',
|
||||||
|
statement: {
|
||||||
|
Principal: {
|
||||||
|
AWS: [anotherAccountId, defaultAccountId],
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
valids: defaultValids,
|
||||||
|
result: 'Allow',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'anonymous as NotPrincipal (effect Allow) -> neutral',
|
||||||
|
statement: {
|
||||||
|
NotPrincipal: '*',
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
valids: defaultValids,
|
||||||
|
result: 'Neutral',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'anonymous as NotPrincipal (effect Deny) -> neutral',
|
||||||
|
statement: {
|
||||||
|
NotPrincipal: '*',
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
valids: defaultValids,
|
||||||
|
result: 'Neutral',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'account (arn) in NotPrincipal (effect Allow) -> neutral',
|
||||||
|
statement: {
|
||||||
|
NotPrincipal: {
|
||||||
|
AWS: defaultAccountArn,
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
valids: defaultValids,
|
||||||
|
result: 'Neutral',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'account (arn) in NotPrincipal (effect Deny) -> neutral',
|
||||||
|
statement: {
|
||||||
|
NotPrincipal: {
|
||||||
|
AWS: [anotherAccountArn, defaultAccountArn],
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
valids: defaultValids,
|
||||||
|
result: 'Neutral',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'account (arn) not in NotPrincipal (effect Allow) -> ' +
|
||||||
|
'grant access',
|
||||||
|
statement: {
|
||||||
|
NotPrincipal: {
|
||||||
|
AWS: anotherAccountArn,
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
valids: defaultValids,
|
||||||
|
result: 'Allow',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'account (arn) not in NotPrincipal (effect Deny) -> ' +
|
||||||
|
'deny access',
|
||||||
|
statement: {
|
||||||
|
NotPrincipal: {
|
||||||
|
AWS: anotherAccountArn,
|
||||||
|
},
|
||||||
|
Effect: 'Deny',
|
||||||
|
},
|
||||||
|
valids: defaultValids,
|
||||||
|
result: 'Deny',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'Other entities than AWS in principal (effect Allow) -> ' +
|
||||||
|
'neutral',
|
||||||
|
statement: {
|
||||||
|
Principal: {
|
||||||
|
Service: 'backbeat',
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
valids: defaultValids,
|
||||||
|
result: 'Neutral',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'Other entities than AWS in principal (effect Deny) -> ' +
|
||||||
|
'neutral',
|
||||||
|
statement: {
|
||||||
|
Principal: {
|
||||||
|
Service: 'backbeat',
|
||||||
|
},
|
||||||
|
Effect: 'Deny',
|
||||||
|
},
|
||||||
|
valids: defaultValids,
|
||||||
|
result: 'Neutral',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'Service in Principal (effect Allow) -> grant access',
|
||||||
|
statement: {
|
||||||
|
Principal: {
|
||||||
|
Service: 'backbeat',
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
valids: {
|
||||||
|
Service: 'backbeat',
|
||||||
|
},
|
||||||
|
result: 'Allow',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'User as principal (effect Allow) -> grant access',
|
||||||
|
statement: {
|
||||||
|
Principal: {
|
||||||
|
AWS: `arn:aws:iam::${defaultAccountId}:user/test`,
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
valids: {
|
||||||
|
AWS: `arn:aws:iam::${defaultAccountId}:user/test`,
|
||||||
|
},
|
||||||
|
result: 'Allow',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'User not in Principal (effect Allow) -> neutral',
|
||||||
|
statement: {
|
||||||
|
Principal: {
|
||||||
|
AWS: `arn:aws:iam::${defaultAccountId}:user/test`,
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
valids: {
|
||||||
|
AWS: `arn:aws:iam::${defaultAccountId}:user/another/testUser`,
|
||||||
|
},
|
||||||
|
result: 'Neutral',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'Role in Principal (effect Allow) -> grant access',
|
||||||
|
statement: {
|
||||||
|
Principal: {
|
||||||
|
AWS: `arn:aws:iam::${defaultAccountId}:role/role1`,
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
valids: {
|
||||||
|
AWS: [
|
||||||
|
`arn:aws:iam::${defaultAccountId}:role/role1`,
|
||||||
|
`arn:aws:iam::${defaultAccountId}:assumed-role` +
|
||||||
|
'/role1/session',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
result: 'Allow',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'Role in Principal (effect Deny) -> deny access',
|
||||||
|
statement: {
|
||||||
|
Principal: {
|
||||||
|
AWS: `arn:aws:iam::${defaultAccountId}:role/role1`,
|
||||||
|
},
|
||||||
|
Effect: 'Deny',
|
||||||
|
},
|
||||||
|
valids: {
|
||||||
|
AWS: [
|
||||||
|
`arn:aws:iam::${defaultAccountId}:role/role1`,
|
||||||
|
`arn:aws:iam::${defaultAccountId}:assumed-role` +
|
||||||
|
'/role1/session',
|
||||||
|
],
|
||||||
|
},
|
||||||
|
result: 'Deny',
|
||||||
|
},
|
||||||
|
].forEach(test => {
|
||||||
|
it(`_evaluatePrincipalField(): ${test.name}`, () => {
|
||||||
|
assert.strictEqual(Principal._evaluatePrincipalField(defaultParams,
|
||||||
|
test.statement, test.valids), test.result);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
[
|
||||||
|
{
|
||||||
|
name: 'should allow with a neutral',
|
||||||
|
statement: [
|
||||||
|
{
|
||||||
|
Principal: {
|
||||||
|
AWS: anotherAccountArn,
|
||||||
|
},
|
||||||
|
Effect: 'Deny',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Principal: {
|
||||||
|
AWS: defaultAccountArn,
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
valids: defaultValids,
|
||||||
|
result: 'Allow',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'should deny even with an allow',
|
||||||
|
statement: [
|
||||||
|
{
|
||||||
|
Principal: {
|
||||||
|
AWS: defaultAccountArn,
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Principal: {
|
||||||
|
AWS: defaultAccountArn,
|
||||||
|
},
|
||||||
|
Effect: 'Deny',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
valids: defaultValids,
|
||||||
|
result: 'Deny',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'should deny if no matches',
|
||||||
|
statement: [
|
||||||
|
{
|
||||||
|
Principal: {
|
||||||
|
AWS: anotherAccountArn,
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
valids: defaultValids,
|
||||||
|
result: 'Deny',
|
||||||
|
},
|
||||||
|
].forEach(test => {
|
||||||
|
it(`_evaluatePrincipal(): ${test.name}`, () => {
|
||||||
|
const params = {
|
||||||
|
log: defaultParams.log,
|
||||||
|
trustedPolicy: {
|
||||||
|
Statement: test.statement,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
const valids = test.valids;
|
||||||
|
assert.strictEqual(Principal._evaluatePrincipal(params, valids),
|
||||||
|
test.result);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
[
|
||||||
|
{
|
||||||
|
name: 'should check user inside the same account',
|
||||||
|
statement: [
|
||||||
|
{
|
||||||
|
Principal: {
|
||||||
|
AWS: defaultUserArn,
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
requester: {
|
||||||
|
accountId: defaultAccountId,
|
||||||
|
arn: defaultUserArn,
|
||||||
|
parentArn: null,
|
||||||
|
userType: 'User',
|
||||||
|
},
|
||||||
|
target: {
|
||||||
|
accountId: defaultAccountId,
|
||||||
|
},
|
||||||
|
result: {
|
||||||
|
result: 'Allow',
|
||||||
|
checkAction: false,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'should deny user inside the same account',
|
||||||
|
statement: [
|
||||||
|
{
|
||||||
|
Principal: {
|
||||||
|
AWS: defaultUserArn,
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
requester: {
|
||||||
|
accountId: defaultAccountId,
|
||||||
|
arn: `arn:aws:iam::${defaultAccountId}:user/anotherUser`,
|
||||||
|
parentArn: null,
|
||||||
|
userType: 'User',
|
||||||
|
},
|
||||||
|
target: {
|
||||||
|
accountId: defaultAccountId,
|
||||||
|
},
|
||||||
|
result: {
|
||||||
|
result: 'Deny',
|
||||||
|
checkAction: false,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'should deny principal if account is deny',
|
||||||
|
statement: [
|
||||||
|
{
|
||||||
|
Principal: {
|
||||||
|
AWS: defaultAccountId,
|
||||||
|
},
|
||||||
|
Effect: 'Deny',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Principal: {
|
||||||
|
AWS: defaultUserArn,
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
requester: {
|
||||||
|
accountId: defaultAccountId,
|
||||||
|
arn: defaultUserArn,
|
||||||
|
parentArn: null,
|
||||||
|
userType: 'User',
|
||||||
|
},
|
||||||
|
target: {
|
||||||
|
accountId: defaultAccountId,
|
||||||
|
},
|
||||||
|
result: {
|
||||||
|
result: 'Deny',
|
||||||
|
checkAction: false,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'should deny assumed role if role is deny',
|
||||||
|
statement: [
|
||||||
|
{
|
||||||
|
Principal: {
|
||||||
|
AWS: defaultRole,
|
||||||
|
},
|
||||||
|
Effect: 'Deny',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Principal: {
|
||||||
|
AWS: defaultAssumedRole,
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
requester: {
|
||||||
|
accountId: defaultAccountId,
|
||||||
|
arn: defaultAssumedRole,
|
||||||
|
parentArn: defaultRole,
|
||||||
|
userType: 'AssumedRole',
|
||||||
|
},
|
||||||
|
target: {
|
||||||
|
accountId: defaultAccountId,
|
||||||
|
},
|
||||||
|
result: {
|
||||||
|
result: 'Deny',
|
||||||
|
checkAction: false,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'should deny user as principal if account is different',
|
||||||
|
statement: [
|
||||||
|
{
|
||||||
|
Principal: {
|
||||||
|
AWS: anotherUserArn,
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
requester: {
|
||||||
|
accountId: anotherAccountId,
|
||||||
|
arn: anotherUserArn,
|
||||||
|
parentArn: null,
|
||||||
|
userType: 'User',
|
||||||
|
},
|
||||||
|
target: {
|
||||||
|
accountId: defaultAccountId,
|
||||||
|
},
|
||||||
|
result: {
|
||||||
|
result: 'Deny',
|
||||||
|
checkAction: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'should allow user if account is in principal',
|
||||||
|
statement: [
|
||||||
|
{
|
||||||
|
Principal: {
|
||||||
|
AWS: anotherAccountArn,
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
requester: {
|
||||||
|
accountId: anotherAccountId,
|
||||||
|
arn: anotherUserArn,
|
||||||
|
parentArn: null,
|
||||||
|
userType: 'User',
|
||||||
|
},
|
||||||
|
target: {
|
||||||
|
accountId: defaultAccountId,
|
||||||
|
},
|
||||||
|
result: {
|
||||||
|
result: 'Allow',
|
||||||
|
checkAction: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'should allow service as principal',
|
||||||
|
statement: [
|
||||||
|
{
|
||||||
|
Principal: {
|
||||||
|
Service: 'backbeat',
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
requester: {
|
||||||
|
accountId: defaultAccountId,
|
||||||
|
arn: 'backbeat',
|
||||||
|
parentArn: null,
|
||||||
|
userType: 'Service',
|
||||||
|
},
|
||||||
|
target: {
|
||||||
|
accountId: defaultAccountId,
|
||||||
|
},
|
||||||
|
result: {
|
||||||
|
result: 'Allow',
|
||||||
|
checkAction: false,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'should allow federated provider',
|
||||||
|
statement: [
|
||||||
|
{
|
||||||
|
Principal: {
|
||||||
|
Federated: defaultSamlProvider,
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
requester: {
|
||||||
|
accountId: defaultAccountId,
|
||||||
|
arn: defaultFederatedUser,
|
||||||
|
parentArn: defaultSamlProvider,
|
||||||
|
userType: 'Federated',
|
||||||
|
},
|
||||||
|
target: {
|
||||||
|
accountId: defaultAccountId,
|
||||||
|
},
|
||||||
|
result: {
|
||||||
|
result: 'Allow',
|
||||||
|
checkAction: false,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'should not allow when external id not matching',
|
||||||
|
statement: [
|
||||||
|
{
|
||||||
|
Principal: {
|
||||||
|
AWS: anotherAccountId,
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
Condition: {
|
||||||
|
StringEquals: { 'sts:ExternalId': '12345' },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
requester: {
|
||||||
|
accountId: anotherAccountId,
|
||||||
|
arn: anotherUserArn,
|
||||||
|
parentArn: null,
|
||||||
|
userType: 'User',
|
||||||
|
},
|
||||||
|
target: {
|
||||||
|
accountId: defaultAccountId,
|
||||||
|
},
|
||||||
|
result: {
|
||||||
|
result: 'Deny',
|
||||||
|
checkAction: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'should allow when external id matching',
|
||||||
|
statement: [
|
||||||
|
{
|
||||||
|
Principal: {
|
||||||
|
AWS: anotherAccountId,
|
||||||
|
},
|
||||||
|
Effect: 'Allow',
|
||||||
|
Condition: {
|
||||||
|
StringEquals: { 'sts:ExternalId': '4321' },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
requester: {
|
||||||
|
accountId: anotherAccountId,
|
||||||
|
arn: anotherUserArn,
|
||||||
|
parentArn: null,
|
||||||
|
userType: 'User',
|
||||||
|
},
|
||||||
|
target: {
|
||||||
|
accountId: defaultAccountId,
|
||||||
|
},
|
||||||
|
result: {
|
||||||
|
result: 'Allow',
|
||||||
|
checkAction: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
].forEach(test => {
|
||||||
|
it(`evaluatePrincipal(): ${test.name}`, () => {
|
||||||
|
const rc = new RequestContext({}, {}, '', '', '127.0.0.1',
|
||||||
|
false, 'assumeRole', 'sts', null, {
|
||||||
|
accountid: test.requester.accountId,
|
||||||
|
arn: test.requester.arn,
|
||||||
|
parentArn: test.requester.parentArn,
|
||||||
|
principalType: test.requester.userType,
|
||||||
|
externalId: '4321',
|
||||||
|
}, 'v4', 'V4');
|
||||||
|
|
||||||
|
const params = {
|
||||||
|
log: defaultParams.log,
|
||||||
|
trustedPolicy: {
|
||||||
|
Statement: test.statement,
|
||||||
|
},
|
||||||
|
rc,
|
||||||
|
targetAccountId: test.target.accountId,
|
||||||
|
};
|
||||||
|
const result = Principal.evaluatePrincipal(params);
|
||||||
|
assert.deepStrictEqual(result, test.result);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
|
@ -252,10 +252,10 @@ describe('policyEvaluator', () => {
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Action = 's3:ListBucket';
|
policy.Statement.Action = 's3:ListBucket';
|
||||||
policy.Statement.Condition = { StringEquals:
|
policy.Statement.Condition = { StringEquals:
|
||||||
{ 's3:prefix': [
|
{ 's3:prefix': [
|
||||||
'home/${aws:username}/*${?}${*}${$}${}?',
|
'home/${aws:username}/*${?}${*}${$}${}?',
|
||||||
'home/',
|
'home/',
|
||||||
] } };
|
] } };
|
||||||
const rcModifiers = {
|
const rcModifiers = {
|
||||||
_query: {
|
_query: {
|
||||||
prefix: 'home/Roger/*?*$${}?',
|
prefix: 'home/Roger/*?*$${}?',
|
||||||
|
@ -385,7 +385,7 @@ describe('policyEvaluator', () => {
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Resource = 'arn:aws:s3:::bucket/*';
|
policy.Statement.Resource = 'arn:aws:s3:::bucket/*';
|
||||||
policy.Statement.Condition = { StringNotEquals:
|
policy.Statement.Condition = { StringNotEquals:
|
||||||
{ 's3:x-amz-acl':
|
{ 's3:x-amz-acl':
|
||||||
['public-read', 'public-read-write'] } };
|
['public-read', 'public-read-write'] } };
|
||||||
const rcModifiers = {
|
const rcModifiers = {
|
||||||
_generalResource: 'bucket',
|
_generalResource: 'bucket',
|
||||||
|
@ -402,7 +402,7 @@ describe('policyEvaluator', () => {
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Resource = 'arn:aws:s3:::bucket/*';
|
policy.Statement.Resource = 'arn:aws:s3:::bucket/*';
|
||||||
policy.Statement.Condition = { StringNotEquals:
|
policy.Statement.Condition = { StringNotEquals:
|
||||||
{ 's3:x-amz-acl':
|
{ 's3:x-amz-acl':
|
||||||
['public-read', 'public-read-write'] } };
|
['public-read', 'public-read-write'] } };
|
||||||
const rcModifiers = {
|
const rcModifiers = {
|
||||||
_generalResource: 'bucket',
|
_generalResource: 'bucket',
|
||||||
|
@ -419,7 +419,7 @@ describe('policyEvaluator', () => {
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Resource = 'arn:aws:s3:::bucket/*';
|
policy.Statement.Resource = 'arn:aws:s3:::bucket/*';
|
||||||
policy.Statement.Condition = { StringNotEquals:
|
policy.Statement.Condition = { StringNotEquals:
|
||||||
{ 's3:x-amz-acl':
|
{ 's3:x-amz-acl':
|
||||||
['public-read', 'public-read-write'] } };
|
['public-read', 'public-read-write'] } };
|
||||||
const rcModifiers = {
|
const rcModifiers = {
|
||||||
_generalResource: 'bucket',
|
_generalResource: 'bucket',
|
||||||
|
@ -432,7 +432,7 @@ describe('policyEvaluator', () => {
|
||||||
'if do not meet condition',
|
'if do not meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { StringEqualsIgnoreCase:
|
policy.Statement.Condition = { StringEqualsIgnoreCase:
|
||||||
{ 'aws:UserAgent':
|
{ 'aws:UserAgent':
|
||||||
['CyberSquaw', 's3Sergeant', 'jetSetter'] } };
|
['CyberSquaw', 's3Sergeant', 'jetSetter'] } };
|
||||||
// Not one of the options
|
// Not one of the options
|
||||||
const rcModifiers = {
|
const rcModifiers = {
|
||||||
|
@ -447,7 +447,7 @@ describe('policyEvaluator', () => {
|
||||||
'if meet condition',
|
'if meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { StringEqualsIgnoreCase:
|
policy.Statement.Condition = { StringEqualsIgnoreCase:
|
||||||
{ 'aws:UserAgent':
|
{ 'aws:UserAgent':
|
||||||
['CyberSquaw', 's3Sergeant', 'jetSetter'] } };
|
['CyberSquaw', 's3Sergeant', 'jetSetter'] } };
|
||||||
const rcModifiers = {
|
const rcModifiers = {
|
||||||
_headers: {
|
_headers: {
|
||||||
|
@ -461,7 +461,7 @@ describe('policyEvaluator', () => {
|
||||||
'if do not meet condition',
|
'if do not meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { StringNotEqualsIgnoreCase:
|
policy.Statement.Condition = { StringNotEqualsIgnoreCase:
|
||||||
{ 'aws:UserAgent':
|
{ 'aws:UserAgent':
|
||||||
['CyberSquaw', 's3Sergeant', 'jetSetter'] } };
|
['CyberSquaw', 's3Sergeant', 'jetSetter'] } };
|
||||||
const rcModifiers = {
|
const rcModifiers = {
|
||||||
_headers: {
|
_headers: {
|
||||||
|
@ -475,7 +475,7 @@ describe('policyEvaluator', () => {
|
||||||
'if meet condition',
|
'if meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { StringNotEqualsIgnoreCase:
|
policy.Statement.Condition = { StringNotEqualsIgnoreCase:
|
||||||
{ 'aws:UserAgent':
|
{ 'aws:UserAgent':
|
||||||
['CyberSquaw', 's3Sergeant', 'jetSetter'] } };
|
['CyberSquaw', 's3Sergeant', 'jetSetter'] } };
|
||||||
const rcModifiers = {
|
const rcModifiers = {
|
||||||
_headers: {
|
_headers: {
|
||||||
|
@ -488,7 +488,7 @@ describe('policyEvaluator', () => {
|
||||||
'if condition parameter is completely missing from request',
|
'if condition parameter is completely missing from request',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { StringNotEqualsIgnoreCase:
|
policy.Statement.Condition = { StringNotEqualsIgnoreCase:
|
||||||
{ 'aws:UserAgent':
|
{ 'aws:UserAgent':
|
||||||
['CyberSquaw', 's3Sergeant', 'jetSetter'] } };
|
['CyberSquaw', 's3Sergeant', 'jetSetter'] } };
|
||||||
const rcModifiers = {};
|
const rcModifiers = {};
|
||||||
check(requestContext, rcModifiers, policy, 'Allow');
|
check(requestContext, rcModifiers, policy, 'Allow');
|
||||||
|
@ -621,7 +621,7 @@ describe('policyEvaluator', () => {
|
||||||
'if do not meet condition',
|
'if do not meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateEquals:
|
policy.Statement.Condition = { DateEquals:
|
||||||
{ 'aws:TokenIssueTime':
|
{ 'aws:TokenIssueTime':
|
||||||
'2016-06-30T19:42:23.531Z' } };
|
'2016-06-30T19:42:23.531Z' } };
|
||||||
let rcModifiers =
|
let rcModifiers =
|
||||||
{ _tokenIssueTime: '2016-06-30T19:42:23.431Z' };
|
{ _tokenIssueTime: '2016-06-30T19:42:23.431Z' };
|
||||||
|
@ -635,7 +635,7 @@ describe('policyEvaluator', () => {
|
||||||
'if do not meet condition',
|
'if do not meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition =
|
policy.Statement.Condition =
|
||||||
{ 'aws:EpochTime':
|
{ 'aws:EpochTime':
|
||||||
'1467315743531' };
|
'1467315743531' };
|
||||||
const rcModifiers =
|
const rcModifiers =
|
||||||
{ _tokenIssueTime: '1467315743431' };
|
{ _tokenIssueTime: '1467315743431' };
|
||||||
|
@ -646,7 +646,7 @@ describe('policyEvaluator', () => {
|
||||||
'if meet condition',
|
'if meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateEquals:
|
policy.Statement.Condition = { DateEquals:
|
||||||
{ 'aws:TokenIssueTime':
|
{ 'aws:TokenIssueTime':
|
||||||
'2016-06-30T19:42:23.431Z' } };
|
'2016-06-30T19:42:23.431Z' } };
|
||||||
let rcModifiers =
|
let rcModifiers =
|
||||||
{ _tokenIssueTime: '2016-06-30T19:42:23.431Z' };
|
{ _tokenIssueTime: '2016-06-30T19:42:23.431Z' };
|
||||||
|
@ -661,7 +661,7 @@ describe('policyEvaluator', () => {
|
||||||
() => {
|
() => {
|
||||||
const clock = lolex.install(1467315743431);
|
const clock = lolex.install(1467315743431);
|
||||||
policy.Statement.Condition = { DateEquals:
|
policy.Statement.Condition = { DateEquals:
|
||||||
{ 'aws:EpochTime':
|
{ 'aws:EpochTime':
|
||||||
'1467315743431' } };
|
'1467315743431' } };
|
||||||
check(requestContext, {}, policy, 'Allow');
|
check(requestContext, {}, policy, 'Allow');
|
||||||
clock.uninstall();
|
clock.uninstall();
|
||||||
|
@ -671,7 +671,7 @@ describe('policyEvaluator', () => {
|
||||||
'if do not meet condition',
|
'if do not meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateNotEquals:
|
policy.Statement.Condition = { DateNotEquals:
|
||||||
{ 'aws:TokenIssueTime':
|
{ 'aws:TokenIssueTime':
|
||||||
'2016-06-30T19:42:23.431Z' } };
|
'2016-06-30T19:42:23.431Z' } };
|
||||||
let rcModifiers =
|
let rcModifiers =
|
||||||
{ _tokenIssueTime: '2016-06-30T19:42:23.431Z' };
|
{ _tokenIssueTime: '2016-06-30T19:42:23.431Z' };
|
||||||
|
@ -686,7 +686,7 @@ describe('policyEvaluator', () => {
|
||||||
() => {
|
() => {
|
||||||
const clock = lolex.install(1467315743431);
|
const clock = lolex.install(1467315743431);
|
||||||
policy.Statement.Condition = { DateNotEquals:
|
policy.Statement.Condition = { DateNotEquals:
|
||||||
{ 'aws:EpochTime':
|
{ 'aws:EpochTime':
|
||||||
'1467315743431' } };
|
'1467315743431' } };
|
||||||
check(requestContext, {}, policy, 'Neutral');
|
check(requestContext, {}, policy, 'Neutral');
|
||||||
clock.uninstall();
|
clock.uninstall();
|
||||||
|
@ -696,7 +696,7 @@ describe('policyEvaluator', () => {
|
||||||
'if meet condition',
|
'if meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateNotEquals:
|
policy.Statement.Condition = { DateNotEquals:
|
||||||
{ 'aws:TokenIssueTime':
|
{ 'aws:TokenIssueTime':
|
||||||
'2016-06-30T19:42:23.531Z' } };
|
'2016-06-30T19:42:23.531Z' } };
|
||||||
let rcModifiers =
|
let rcModifiers =
|
||||||
{ _tokenIssueTime: '2016-06-30T19:42:23.431Z' };
|
{ _tokenIssueTime: '2016-06-30T19:42:23.431Z' };
|
||||||
|
@ -710,7 +710,7 @@ describe('policyEvaluator', () => {
|
||||||
'time if meet condition',
|
'time if meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateNotEquals:
|
policy.Statement.Condition = { DateNotEquals:
|
||||||
{ 'aws:EpochTime':
|
{ 'aws:EpochTime':
|
||||||
'1467315743531' } };
|
'1467315743531' } };
|
||||||
check(requestContext, {}, policy, 'Allow');
|
check(requestContext, {}, policy, 'Allow');
|
||||||
});
|
});
|
||||||
|
@ -719,7 +719,7 @@ describe('policyEvaluator', () => {
|
||||||
'condition with ISO time if do not meet condition',
|
'condition with ISO time if do not meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateLessThan:
|
policy.Statement.Condition = { DateLessThan:
|
||||||
{ 'aws:TokenIssueTime':
|
{ 'aws:TokenIssueTime':
|
||||||
'2016-06-30T19:42:23.431Z' } };
|
'2016-06-30T19:42:23.431Z' } };
|
||||||
let rcModifiers =
|
let rcModifiers =
|
||||||
{ _tokenIssueTime: '2016-06-30T19:42:23.531Z' };
|
{ _tokenIssueTime: '2016-06-30T19:42:23.531Z' };
|
||||||
|
@ -733,7 +733,7 @@ describe('policyEvaluator', () => {
|
||||||
'with ISO time if do not meet condition',
|
'with ISO time if do not meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateLessThan:
|
policy.Statement.Condition = { DateLessThan:
|
||||||
{ 'aws:CurrentTime':
|
{ 'aws:CurrentTime':
|
||||||
'2016-06-30T19:42:23.431Z' } };
|
'2016-06-30T19:42:23.431Z' } };
|
||||||
check(requestContext, {}, policy, 'Neutral');
|
check(requestContext, {}, policy, 'Neutral');
|
||||||
});
|
});
|
||||||
|
@ -742,7 +742,7 @@ describe('policyEvaluator', () => {
|
||||||
'with epoch time if do not meet condition',
|
'with epoch time if do not meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateLessThan:
|
policy.Statement.Condition = { DateLessThan:
|
||||||
{ 'aws:EpochTime':
|
{ 'aws:EpochTime':
|
||||||
'1467315743431' } };
|
'1467315743431' } };
|
||||||
check(requestContext, {}, policy, 'Neutral');
|
check(requestContext, {}, policy, 'Neutral');
|
||||||
});
|
});
|
||||||
|
@ -751,8 +751,8 @@ describe('policyEvaluator', () => {
|
||||||
'condition if meet condition',
|
'condition if meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateLessThan:
|
policy.Statement.Condition = { DateLessThan:
|
||||||
{ 'aws:TokenIssueTime':
|
{ 'aws:TokenIssueTime':
|
||||||
['2016-06-30T19:42:23.431Z', '2017-06-30T19:42:23.431Z',
|
['2016-06-30T19:42:23.431Z', '2017-06-30T19:42:23.431Z',
|
||||||
'2018-06-30T19:42:23.431Z'] },
|
'2018-06-30T19:42:23.431Z'] },
|
||||||
};
|
};
|
||||||
const rcModifiers =
|
const rcModifiers =
|
||||||
|
@ -764,7 +764,7 @@ describe('policyEvaluator', () => {
|
||||||
'condition if meet condition',
|
'condition if meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateLessThan:
|
policy.Statement.Condition = { DateLessThan:
|
||||||
{ 'aws:CurrentTime':
|
{ 'aws:CurrentTime':
|
||||||
'2099-06-30T19:42:23.431Z' } };
|
'2099-06-30T19:42:23.431Z' } };
|
||||||
check(requestContext, {}, policy, 'Allow');
|
check(requestContext, {}, policy, 'Allow');
|
||||||
const rcModifiers = { _tokenIssueTime: '1467315743331' };
|
const rcModifiers = { _tokenIssueTime: '1467315743331' };
|
||||||
|
@ -775,7 +775,7 @@ describe('policyEvaluator', () => {
|
||||||
'condition if meet condition',
|
'condition if meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateLessThan:
|
policy.Statement.Condition = { DateLessThan:
|
||||||
{ 'aws:EpochTime':
|
{ 'aws:EpochTime':
|
||||||
'4086531743431' } };
|
'4086531743431' } };
|
||||||
check(requestContext, {}, policy, 'Allow');
|
check(requestContext, {}, policy, 'Allow');
|
||||||
});
|
});
|
||||||
|
@ -784,7 +784,7 @@ describe('policyEvaluator', () => {
|
||||||
'with ISO time if do not meet condition',
|
'with ISO time if do not meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateLessThanEquals:
|
policy.Statement.Condition = { DateLessThanEquals:
|
||||||
{ 'aws:TokenIssueTime':
|
{ 'aws:TokenIssueTime':
|
||||||
'2016-06-30T19:42:23.431Z' } };
|
'2016-06-30T19:42:23.431Z' } };
|
||||||
const rcModifiers =
|
const rcModifiers =
|
||||||
{ _tokenIssueTime: '2016-06-30T19:42:23.531Z' };
|
{ _tokenIssueTime: '2016-06-30T19:42:23.531Z' };
|
||||||
|
@ -795,7 +795,7 @@ describe('policyEvaluator', () => {
|
||||||
'condition with ISO time if do not meet condition',
|
'condition with ISO time if do not meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateLessThanEquals:
|
policy.Statement.Condition = { DateLessThanEquals:
|
||||||
{ 'aws:CurrentTime':
|
{ 'aws:CurrentTime':
|
||||||
'2016-06-30T19:42:23.431Z' } };
|
'2016-06-30T19:42:23.431Z' } };
|
||||||
check(requestContext, {}, policy, 'Neutral');
|
check(requestContext, {}, policy, 'Neutral');
|
||||||
});
|
});
|
||||||
|
@ -804,7 +804,7 @@ describe('policyEvaluator', () => {
|
||||||
'with ISO time if meet condition',
|
'with ISO time if meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateLessThanEquals:
|
policy.Statement.Condition = { DateLessThanEquals:
|
||||||
{ 'aws:TokenIssueTime':
|
{ 'aws:TokenIssueTime':
|
||||||
'2016-06-30T19:42:23.431Z' } };
|
'2016-06-30T19:42:23.431Z' } };
|
||||||
const rcModifiers =
|
const rcModifiers =
|
||||||
{ _tokenIssueTime: '2016-06-30T19:42:23.431Z' };
|
{ _tokenIssueTime: '2016-06-30T19:42:23.431Z' };
|
||||||
|
@ -815,7 +815,7 @@ describe('policyEvaluator', () => {
|
||||||
'condition with ISO time if meet condition',
|
'condition with ISO time if meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateLessThanEquals:
|
policy.Statement.Condition = { DateLessThanEquals:
|
||||||
{ 'aws:CurrentTime':
|
{ 'aws:CurrentTime':
|
||||||
'2099-06-30T19:42:23.431Z' } };
|
'2099-06-30T19:42:23.431Z' } };
|
||||||
check(requestContext, {}, policy, 'Allow');
|
check(requestContext, {}, policy, 'Allow');
|
||||||
});
|
});
|
||||||
|
@ -824,7 +824,7 @@ describe('policyEvaluator', () => {
|
||||||
'with ISO time if do not meet condition',
|
'with ISO time if do not meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateGreaterThan:
|
policy.Statement.Condition = { DateGreaterThan:
|
||||||
{ 'aws:TokenIssueTime':
|
{ 'aws:TokenIssueTime':
|
||||||
'2016-06-30T19:42:23.431Z' } };
|
'2016-06-30T19:42:23.431Z' } };
|
||||||
const rcModifiers =
|
const rcModifiers =
|
||||||
{ _tokenIssueTime: '2016-06-30T19:42:23.331Z' };
|
{ _tokenIssueTime: '2016-06-30T19:42:23.331Z' };
|
||||||
|
@ -835,7 +835,7 @@ describe('policyEvaluator', () => {
|
||||||
'condition with ISO time if do not meet condition',
|
'condition with ISO time if do not meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateGreaterThan:
|
policy.Statement.Condition = { DateGreaterThan:
|
||||||
{ 'aws:CurrentTime':
|
{ 'aws:CurrentTime':
|
||||||
'2099-06-30T19:42:23.431Z' } };
|
'2099-06-30T19:42:23.431Z' } };
|
||||||
check(requestContext, {}, policy, 'Neutral');
|
check(requestContext, {}, policy, 'Neutral');
|
||||||
});
|
});
|
||||||
|
@ -844,7 +844,7 @@ describe('policyEvaluator', () => {
|
||||||
'with ISO time if meet condition',
|
'with ISO time if meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateGreaterThan:
|
policy.Statement.Condition = { DateGreaterThan:
|
||||||
{ 'aws:TokenIssueTime':
|
{ 'aws:TokenIssueTime':
|
||||||
'2016-06-30T19:42:23.431Z' } };
|
'2016-06-30T19:42:23.431Z' } };
|
||||||
const rcModifiers =
|
const rcModifiers =
|
||||||
{ _tokenIssueTime: '2016-06-30T19:42:23.531Z' };
|
{ _tokenIssueTime: '2016-06-30T19:42:23.531Z' };
|
||||||
|
@ -855,7 +855,7 @@ describe('policyEvaluator', () => {
|
||||||
'condition with ISO time if meet condition',
|
'condition with ISO time if meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateGreaterThan:
|
policy.Statement.Condition = { DateGreaterThan:
|
||||||
{ 'aws:CurrentTime':
|
{ 'aws:CurrentTime':
|
||||||
'2016-06-30T19:42:23.431Z' } };
|
'2016-06-30T19:42:23.431Z' } };
|
||||||
check(requestContext, {}, policy, 'Allow');
|
check(requestContext, {}, policy, 'Allow');
|
||||||
});
|
});
|
||||||
|
@ -864,7 +864,7 @@ describe('policyEvaluator', () => {
|
||||||
'with ISO time if do not meet condition',
|
'with ISO time if do not meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateGreaterThanEquals:
|
policy.Statement.Condition = { DateGreaterThanEquals:
|
||||||
{ 'aws:TokenIssueTime':
|
{ 'aws:TokenIssueTime':
|
||||||
'2016-06-30T19:42:23.431Z' } };
|
'2016-06-30T19:42:23.431Z' } };
|
||||||
const rcModifiers =
|
const rcModifiers =
|
||||||
{ _tokenIssueTime: '2016-06-30T19:42:23.331Z' };
|
{ _tokenIssueTime: '2016-06-30T19:42:23.331Z' };
|
||||||
|
@ -875,7 +875,7 @@ describe('policyEvaluator', () => {
|
||||||
'condition with ISO time if do not meet condition',
|
'condition with ISO time if do not meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateGreaterThanEquals:
|
policy.Statement.Condition = { DateGreaterThanEquals:
|
||||||
{ 'aws:CurrentTime':
|
{ 'aws:CurrentTime':
|
||||||
'2099-06-30T19:42:23.431Z' } };
|
'2099-06-30T19:42:23.431Z' } };
|
||||||
check(requestContext, {}, policy, 'Neutral');
|
check(requestContext, {}, policy, 'Neutral');
|
||||||
});
|
});
|
||||||
|
@ -884,7 +884,7 @@ describe('policyEvaluator', () => {
|
||||||
'condition with ISO time if meet condition',
|
'condition with ISO time if meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateGreaterThanEquals:
|
policy.Statement.Condition = { DateGreaterThanEquals:
|
||||||
{ 'aws:TokenIssueTime':
|
{ 'aws:TokenIssueTime':
|
||||||
'2016-06-30T19:42:23.431Z' } };
|
'2016-06-30T19:42:23.431Z' } };
|
||||||
const rcModifiers =
|
const rcModifiers =
|
||||||
{ _tokenIssueTime: '2016-06-30T19:42:23.431Z' };
|
{ _tokenIssueTime: '2016-06-30T19:42:23.431Z' };
|
||||||
|
@ -895,7 +895,7 @@ describe('policyEvaluator', () => {
|
||||||
'time condition with ISO time if meet condition',
|
'time condition with ISO time if meet condition',
|
||||||
() => {
|
() => {
|
||||||
policy.Statement.Condition = { DateGreaterThanEquals:
|
policy.Statement.Condition = { DateGreaterThanEquals:
|
||||||
{ 'aws:CurrentTime':
|
{ 'aws:CurrentTime':
|
||||||
'2016-06-30T19:42:23.431Z' } };
|
'2016-06-30T19:42:23.431Z' } };
|
||||||
check(requestContext, {}, policy, 'Allow');
|
check(requestContext, {}, policy, 'Allow');
|
||||||
});
|
});
|
||||||
|
@ -1078,6 +1078,36 @@ describe('policyEvaluator', () => {
|
||||||
check(requestContext, {}, policy, 'Allow');
|
check(requestContext, {}, policy, 'Allow');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should allow policy arn if meet condition',
|
||||||
|
() => {
|
||||||
|
policy.Statement.Condition = {
|
||||||
|
ArnLike: { 'iam:PolicyArn':
|
||||||
|
['arn:aws:iam::012345678901:policy/dev/*'] },
|
||||||
|
};
|
||||||
|
requestContext.setRequesterInfo(
|
||||||
|
{ accountid: '012345678901' });
|
||||||
|
const rcModifiers = {
|
||||||
|
_policyArn:
|
||||||
|
'arn:aws:iam::012345678901:policy/dev/devMachine1',
|
||||||
|
};
|
||||||
|
check(requestContext, rcModifiers, policy, 'Allow');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not allow policy arn if do not meet condition',
|
||||||
|
() => {
|
||||||
|
policy.Statement.Condition = {
|
||||||
|
ArnLike: { 'iam:PolicyArn':
|
||||||
|
['arn:aws:iam::012345678901:policy/dev/*'] },
|
||||||
|
};
|
||||||
|
requestContext.setRequesterInfo(
|
||||||
|
{ accountid: '012345678901' });
|
||||||
|
const rcModifiers = {
|
||||||
|
_policyArn:
|
||||||
|
'arn:aws:iam::012345678901:policy/admin/deleteUser',
|
||||||
|
};
|
||||||
|
check(requestContext, rcModifiers, policy, 'Neutral');
|
||||||
|
});
|
||||||
|
|
||||||
it('should allow access with multiple operator conditions ' +
|
it('should allow access with multiple operator conditions ' +
|
||||||
'and multiple conditions under an operator',
|
'and multiple conditions under an operator',
|
||||||
() => {
|
() => {
|
||||||
|
@ -1142,7 +1172,7 @@ describe('policyEvaluator', () => {
|
||||||
requestContext.setRequesterInfo({});
|
requestContext.setRequesterInfo({});
|
||||||
const result = evaluateAllPolicies(requestContext,
|
const result = evaluateAllPolicies(requestContext,
|
||||||
[samples['arn:aws:iam::aws:policy/AmazonS3FullAccess'],
|
[samples['arn:aws:iam::aws:policy/AmazonS3FullAccess'],
|
||||||
samples['Deny Bucket Policy']], log);
|
samples['Deny Bucket Policy']], log);
|
||||||
assert.strictEqual(result, 'Deny');
|
assert.strictEqual(result, 'Deny');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -1153,7 +1183,7 @@ describe('policyEvaluator', () => {
|
||||||
requestContext.setRequesterInfo({});
|
requestContext.setRequesterInfo({});
|
||||||
const result = evaluateAllPolicies(requestContext,
|
const result = evaluateAllPolicies(requestContext,
|
||||||
[samples['Multi-Statement Policy'],
|
[samples['Multi-Statement Policy'],
|
||||||
samples['Variable Bucket Policy']], log);
|
samples['Variable Bucket Policy']], log);
|
||||||
assert.strictEqual(result, 'Deny');
|
assert.strictEqual(result, 'Deny');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -1165,7 +1195,7 @@ describe('policyEvaluator', () => {
|
||||||
requestContext.setRequesterInfo({});
|
requestContext.setRequesterInfo({});
|
||||||
const result = evaluateAllPolicies(requestContext,
|
const result = evaluateAllPolicies(requestContext,
|
||||||
[samples['Multi-Statement Policy'],
|
[samples['Multi-Statement Policy'],
|
||||||
samples['Variable Bucket Policy']], log);
|
samples['Variable Bucket Policy']], log);
|
||||||
assert.strictEqual(result, 'Deny');
|
assert.strictEqual(result, 'Deny');
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -1232,11 +1262,13 @@ describe('handleWildcards', () => {
|
||||||
assert.deepStrictEqual(result, '^abc\\*abc\\?abc\\$$');
|
assert.deepStrictEqual(result, '^abc\\*abc\\?abc\\$$');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
/* eslint-disable no-useless-escape */
|
||||||
it('should escape other regular expression special characters', () => {
|
it('should escape other regular expression special characters', () => {
|
||||||
const result = handleWildcards('*^.+?()|[\]{}');
|
const result = handleWildcards('*^.+?()|[\]{}');
|
||||||
assert.deepStrictEqual(result,
|
assert.deepStrictEqual(result,
|
||||||
'^.*?\\^\\.\\+.{1}\\(\\)\\|\\[\\\]\\{\\}$');
|
'^.*?\\^\\.\\+.{1}\\(\\)\\|\\[\\\]\\{\\}$');
|
||||||
});
|
});
|
||||||
|
/* eslint-enable */
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('substituteVariables', () => {
|
describe('substituteVariables', () => {
|
||||||
|
|
|
@ -0,0 +1,73 @@
|
||||||
|
const assert = require('assert');
|
||||||
|
|
||||||
|
const azureMpuUtils =
|
||||||
|
require('../../../../lib/s3middleware/azureHelpers/mpuUtils');
|
||||||
|
const padString = azureMpuUtils.padString;
|
||||||
|
const getSubPartInfo = azureMpuUtils.getSubPartInfo;
|
||||||
|
|
||||||
|
const padStringTests = [
|
||||||
|
{
|
||||||
|
category: 'partNumber',
|
||||||
|
strings: [1, 10, 100, 10000],
|
||||||
|
expectedResults: ['00001', '00010', '00100', '10000'],
|
||||||
|
}, {
|
||||||
|
category: 'subPart',
|
||||||
|
strings: [1, 50],
|
||||||
|
expectedResults: ['01', '50'],
|
||||||
|
}, {
|
||||||
|
category: 'part',
|
||||||
|
strings: ['test|'],
|
||||||
|
expectedResults:
|
||||||
|
['test|%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
const oneMb = 1024 * 1024;
|
||||||
|
const oneHundredMb = oneMb * 100;
|
||||||
|
const subPartInfoTests = [
|
||||||
|
{
|
||||||
|
desc: '100 mb',
|
||||||
|
size: oneHundredMb,
|
||||||
|
expectedNumberSubParts: 1,
|
||||||
|
expectedLastPartSize: oneHundredMb,
|
||||||
|
}, {
|
||||||
|
desc: '101 mb',
|
||||||
|
size: oneHundredMb + oneMb,
|
||||||
|
expectedNumberSubParts: 2,
|
||||||
|
expectedLastPartSize: oneMb,
|
||||||
|
}, {
|
||||||
|
desc: '599 mb',
|
||||||
|
size: 6 * oneHundredMb - oneMb,
|
||||||
|
expectedNumberSubParts: 6,
|
||||||
|
expectedLastPartSize: oneHundredMb - oneMb,
|
||||||
|
}, {
|
||||||
|
desc: '600 mb',
|
||||||
|
size: 6 * oneHundredMb,
|
||||||
|
expectedNumberSubParts: 6,
|
||||||
|
expectedLastPartSize: oneHundredMb,
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
describe('s3middleware Azure MPU helper utility function', () => {
|
||||||
|
padStringTests.forEach(test => {
|
||||||
|
it(`padString should pad a ${test.category}`, done => {
|
||||||
|
const result = test.strings.map(str =>
|
||||||
|
padString(str, test.category));
|
||||||
|
assert.deepStrictEqual(result, test.expectedResults);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
subPartInfoTests.forEach(test => {
|
||||||
|
const { desc, size, expectedNumberSubParts, expectedLastPartSize }
|
||||||
|
= test;
|
||||||
|
it('getSubPartInfo should return correct result for ' +
|
||||||
|
`dataContentLength of ${desc}`, done => {
|
||||||
|
const result = getSubPartInfo(size);
|
||||||
|
const expectedLastPartIndex = expectedNumberSubParts - 1;
|
||||||
|
assert.strictEqual(result.lastPartIndex, expectedLastPartIndex);
|
||||||
|
assert.strictEqual(result.lastPartSize, expectedLastPartSize);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
|
@ -0,0 +1,24 @@
|
||||||
|
const assert = require('assert');
|
||||||
|
const crypto = require('crypto');
|
||||||
|
|
||||||
|
const objectUtils =
|
||||||
|
require('../../../lib/s3middleware/objectUtils');
|
||||||
|
|
||||||
|
const hexHash = 'd41d8cd98f00b204e9800998ecf8427e';
|
||||||
|
const base64Hash = '1B2M2Y8AsgTpgAmY7PhCfg==';
|
||||||
|
|
||||||
|
describe('s3middleware object utilites', () => {
|
||||||
|
it('should convert hexademal MD5 to base 64', done => {
|
||||||
|
const hash = crypto.createHash('md5').digest('hex');
|
||||||
|
const convertedHash = objectUtils.getBase64MD5(hash);
|
||||||
|
assert.strictEqual(convertedHash, base64Hash);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should convert base 64 MD5 to hexadecimal', done => {
|
||||||
|
const hash = crypto.createHash('md5').digest('base64');
|
||||||
|
const convertedHash = objectUtils.getHexMD5(hash);
|
||||||
|
assert.strictEqual(convertedHash, hexHash);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
|
@ -77,7 +77,7 @@ describe('raft record log client', () => {
|
||||||
function setup(done) {
|
function setup(done) {
|
||||||
bucketClient = new BucketClientMock();
|
bucketClient = new BucketClientMock();
|
||||||
logClient = new LogConsumer({ bucketClient,
|
logClient = new LogConsumer({ bucketClient,
|
||||||
raftSession: 0 });
|
raftSession: 0 });
|
||||||
done();
|
done();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -126,7 +126,7 @@ describe('raft record log client', () => {
|
||||||
describe('error cases', () => {
|
describe('error cases', () => {
|
||||||
it('should handle 404 error gracefully', done => {
|
it('should handle 404 error gracefully', done => {
|
||||||
const logClient = new LogConsumer({ bucketClient,
|
const logClient = new LogConsumer({ bucketClient,
|
||||||
raftSession: 1 });
|
raftSession: 1 });
|
||||||
logClient.readRecords({}, (err, info) => {
|
logClient.readRecords({}, (err, info) => {
|
||||||
assert.ifError(err);
|
assert.ifError(err);
|
||||||
assert.deepStrictEqual(info, {
|
assert.deepStrictEqual(info, {
|
||||||
|
@ -136,7 +136,7 @@ describe('raft record log client', () => {
|
||||||
});
|
});
|
||||||
it('should handle 416 error gracefully', done => {
|
it('should handle 416 error gracefully', done => {
|
||||||
const logClient = new LogConsumer({ bucketClient,
|
const logClient = new LogConsumer({ bucketClient,
|
||||||
raftSession: 2 });
|
raftSession: 2 });
|
||||||
logClient.readRecords({}, (err, info) => {
|
logClient.readRecords({}, (err, info) => {
|
||||||
assert.ifError(err);
|
assert.ifError(err);
|
||||||
assert.deepStrictEqual(info, {
|
assert.deepStrictEqual(info, {
|
||||||
|
@ -146,7 +146,7 @@ describe('raft record log client', () => {
|
||||||
});
|
});
|
||||||
it('should handle other errors correctly', done => {
|
it('should handle other errors correctly', done => {
|
||||||
const logClient = new LogConsumer({ bucketClient,
|
const logClient = new LogConsumer({ bucketClient,
|
||||||
raftSession: 3 });
|
raftSession: 3 });
|
||||||
logClient.readRecords({}, err => {
|
logClient.readRecords({}, err => {
|
||||||
assert(err);
|
assert(err);
|
||||||
assert(err.InternalError);
|
assert(err.InternalError);
|
||||||
|
@ -155,7 +155,7 @@ describe('raft record log client', () => {
|
||||||
});
|
});
|
||||||
it('should not crash with malformed log response', done => {
|
it('should not crash with malformed log response', done => {
|
||||||
const logClient = new LogConsumer({ bucketClient,
|
const logClient = new LogConsumer({ bucketClient,
|
||||||
raftSession: 4 });
|
raftSession: 4 });
|
||||||
logClient.readRecords({}, err => {
|
logClient.readRecords({}, err => {
|
||||||
assert(err);
|
assert(err);
|
||||||
assert(err.InternalError);
|
assert(err.InternalError);
|
||||||
|
|
|
@ -116,14 +116,14 @@ describe('record log - persistent log of metadata operations', () => {
|
||||||
it('should be able to add records and list them thereafter', done => {
|
it('should be able to add records and list them thereafter', done => {
|
||||||
debug('going to append records');
|
debug('going to append records');
|
||||||
const ops = [{ type: 'put', key: 'foo', value: 'bar',
|
const ops = [{ type: 'put', key: 'foo', value: 'bar',
|
||||||
prefix: ['foobucket'] },
|
prefix: ['foobucket'] },
|
||||||
{ type: 'del', key: 'baz',
|
{ type: 'del', key: 'baz',
|
||||||
prefix: ['foobucket'] },
|
prefix: ['foobucket'] },
|
||||||
{ type: 'put',
|
{ type: 'put',
|
||||||
key: 'Pâtisserie=中文-español-English',
|
key: 'Pâtisserie=中文-español-English',
|
||||||
value: 'yummy',
|
value: 'yummy',
|
||||||
prefix: ['foobucket'] },
|
prefix: ['foobucket'] },
|
||||||
];
|
];
|
||||||
logProxy.createLogRecordOps(ops, (err, logEntries) => {
|
logProxy.createLogRecordOps(ops, (err, logEntries) => {
|
||||||
assert.ifError(err);
|
assert.ifError(err);
|
||||||
db.batch(ops.concat(logEntries), err => {
|
db.batch(ops.concat(logEntries), err => {
|
||||||
|
@ -198,7 +198,7 @@ describe('record log - persistent log of metadata operations', () => {
|
||||||
for (let i = 1; i <= 1000; ++i) {
|
for (let i = 1; i <= 1000; ++i) {
|
||||||
recordsToAdd.push(
|
recordsToAdd.push(
|
||||||
{ type: 'put', key: `foo${i}`, value: `bar${i}`,
|
{ type: 'put', key: `foo${i}`, value: `bar${i}`,
|
||||||
prefix: ['foobucket'] });
|
prefix: ['foobucket'] });
|
||||||
}
|
}
|
||||||
logProxy.createLogRecordOps(recordsToAdd, (err, logRecs) => {
|
logProxy.createLogRecordOps(recordsToAdd, (err, logRecs) => {
|
||||||
assert.ifError(err);
|
assert.ifError(err);
|
||||||
|
|
|
@ -163,60 +163,60 @@ describe('test VSP', () => {
|
||||||
};
|
};
|
||||||
vsp.put(request, logger, next);
|
vsp.put(request, logger, next);
|
||||||
},
|
},
|
||||||
(res, next) => {
|
(res, next) => {
|
||||||
v1 = Version.from(res).getVersionId();
|
v1 = Version.from(res).getVersionId();
|
||||||
const request = {
|
const request = {
|
||||||
db: 'foo',
|
db: 'foo',
|
||||||
key: 'bar',
|
key: 'bar',
|
||||||
value: '{"qux":"quz2"}',
|
value: '{"qux":"quz2"}',
|
||||||
options: { versioning: true },
|
options: { versioning: true },
|
||||||
};
|
};
|
||||||
vsp.put(request, logger, next);
|
vsp.put(request, logger, next);
|
||||||
},
|
},
|
||||||
(res, next) => {
|
(res, next) => {
|
||||||
v2 = Version.from(res).getVersionId();
|
v2 = Version.from(res).getVersionId();
|
||||||
|
|
||||||
// overwriting v1: master should not be updated
|
// overwriting v1: master should not be updated
|
||||||
const request = {
|
const request = {
|
||||||
db: 'foo',
|
db: 'foo',
|
||||||
key: 'bar',
|
key: 'bar',
|
||||||
value: '{"qux":"quz1.1"}',
|
value: '{"qux":"quz1.1"}',
|
||||||
options: { versioning: true,
|
options: { versioning: true,
|
||||||
versionId: v1 },
|
versionId: v1 },
|
||||||
};
|
};
|
||||||
vsp.put(request, logger, next);
|
vsp.put(request, logger, next);
|
||||||
},
|
},
|
||||||
(res, next) => {
|
(res, next) => {
|
||||||
const request = {
|
const request = {
|
||||||
db: 'foo',
|
db: 'foo',
|
||||||
key: 'bar',
|
key: 'bar',
|
||||||
};
|
};
|
||||||
vsp.get(request, logger, next);
|
vsp.get(request, logger, next);
|
||||||
},
|
},
|
||||||
(res, next) => {
|
(res, next) => {
|
||||||
assert.strictEqual(JSON.parse(res).qux, 'quz2');
|
assert.strictEqual(JSON.parse(res).qux, 'quz2');
|
||||||
|
|
||||||
// overwriting v2: master should be updated
|
// overwriting v2: master should be updated
|
||||||
const request = {
|
const request = {
|
||||||
db: 'foo',
|
db: 'foo',
|
||||||
key: 'bar',
|
key: 'bar',
|
||||||
value: '{"qux":"quz2.1"}',
|
value: '{"qux":"quz2.1"}',
|
||||||
options: { versioning: true,
|
options: { versioning: true,
|
||||||
versionId: v2 },
|
versionId: v2 },
|
||||||
};
|
};
|
||||||
vsp.put(request, logger, next);
|
vsp.put(request, logger, next);
|
||||||
},
|
},
|
||||||
(res, next) => {
|
(res, next) => {
|
||||||
const request = {
|
const request = {
|
||||||
db: 'foo',
|
db: 'foo',
|
||||||
key: 'bar',
|
key: 'bar',
|
||||||
};
|
};
|
||||||
vsp.get(request, logger, next);
|
vsp.get(request, logger, next);
|
||||||
},
|
},
|
||||||
(res, next) => {
|
(res, next) => {
|
||||||
assert.strictEqual(JSON.parse(res).qux, 'quz2.1');
|
assert.strictEqual(JSON.parse(res).qux, 'quz2.1');
|
||||||
next();
|
next();
|
||||||
}],
|
}],
|
||||||
done);
|
done);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
Loading…
Reference in New Issue