Compare commits
60 Commits
3c54bd740f ... bcabdeeadb
Author | SHA1
---|---
Jonathan Gramain | bcabdeeadb
Jonathan Gramain | e166d1a894
ironman-machine | 44800cf175
Jonathan Gramain | 51a4146876
Rahul Padigela | 563bbfcb8b
Bennett Buchanan | 4942fab225
Rahul Padigela | 91a828805b
Rahul Padigela | eb9b60c0ef
alexandremerle | 66acfbbab4
Electra Chong | efe8ed76ba
Rahul Padigela | dad0d456d3
Rahul Padigela | 4601794d49
Rahul Padigela | 36157fb688
Bennett Buchanan | 639374522d
Rahul Padigela | 2499ce7277
Rahul Padigela | 0bab4069cd
Rahul Padigela | 5949e12ffc
Lauren Spiegel | 7ca3c0515a
Rahul Padigela | 711d64d5f1
Rahul Padigela | b22d12009a
Jonathan Gramain | 50a90d2b41
Dora Korpar | a1ce222a87
Jonathan Gramain | a77bf3126d
ironman-machine | 300769dda6
Jonathan Gramain | a65c554f64
philipyoo | 894d41a30b
Rahul Padigela | 673da3de99
Jonathan Gramain | 0f535cc26a
ironman-machine | b1447906dd
mvaude | d7e4e3b7aa
mvaude | b445a8487b
Rahul Padigela | af460a0939
Bennett Buchanan | 8cf3d091cb
ironman-machine | 1f77deab61
Jonathan Gramain | 96823f0a06
ironman-machine | 41a823a57e
Jonathan Gramain | b31bc06e63
Rahul Padigela | 46d703de6b
ironman-machine | bb3e63ea17
Electra Chong | 5d466e01b3
Electra Chong | 7cbdac5f52
philipyoo | 1e2d9be8f7
Electra Chong | e89395c428
Vianney Rancurel | 58ac3abe1a
ironman-machine | c22b937fe5
Jonathan Gramain | 4c1fa030bf
Rahul Padigela | 7e2676f635
ironman-machine | e5cf9b1aec
Alexandre Merle | d1e7f05c7d
ironman-machine | 4323bfaab0
Alexandre Merle | 9d9d21127c
ironman-machine | dd9df1745c
Alexandre Merle | 2fcf728d38
Lauren Spiegel | e9993ed64e
Dora Korpar | 012e281366
philipyoo | 0d62d5a161
Rahul Padigela | cc5dad3e83
Bennett Buchanan | 62f2accc5c
ironman-machine | 6ad2af98cd
Dora Korpar | 286a599ae8
@@ -1 +1,5 @@
# Logs
*.log

# Dependency directory
node_modules/
@@ -7,6 +7,8 @@ general:
machine:
  node:
    version: 6.9.5
  services:
    - redis
  environment:
    CXX: g++-4.9
index.js (18 changed lines)

@@ -29,6 +29,7 @@ module.exports = {
        evaluators: require('./lib/policyEvaluator/evaluator.js'),
        validateUserPolicy: require('./lib/policy/policyValidator')
            .validateUserPolicy,
        evaluatePrincipal: require('./lib/policyEvaluator/principal'),
        RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
    },
    Clustering: require('./lib/Clustering'),
@@ -59,12 +60,22 @@ module.exports = {
    },
    s3middleware: {
        userMetadata: require('./lib/s3middleware/userMetadata'),
        convertToXml: require('./lib/s3middleware/convertToXml'),
        escapeForXml: require('./lib/s3middleware/escapeForXml'),
        tagging: require('./lib/s3middleware/tagging'),
        validateConditionalHeaders:
            require('./lib/s3middleware/validateConditionalHeaders')
                .validateConditionalHeaders,
        MD5Sum: require('./lib/s3middleware/MD5Sum'),
        objectUtils: require('./lib/s3middleware/objectUtils'),
        azureHelper: {
            mpuUtils:
                require('./lib/s3middleware/azureHelpers/mpuUtils'),
            ResultsCollector:
                require('./lib/s3middleware/azureHelpers/ResultsCollector'),
            SubStreamInterface:
                require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
        },
    },
    storage: {
        metadata: {
@@ -83,12 +94,17 @@ module.exports = {
        },
        utils: require('./lib/storage/utils'),
    },

    models: {
        BucketInfo: require('./lib/models/BucketInfo'),
        ObjectMD: require('./lib/models/ObjectMD'),
        ObjectMDLocation: require('./lib/models/ObjectMDLocation'),
        ARN: require('./lib/models/ARN'),
        WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
        ReplicationConfiguration:
            require('./lib/models/ReplicationConfiguration'),
    },
    metrics: {
        StatsClient: require('./lib/metrics/StatsClient'),
        RedisClient: require('./lib/metrics/RedisClient'),
    },
};
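The index.js hunks above only wire the new modules into the package's public exports. A minimal consumer sketch (it assumes the package is installed under its usual name, arsenal):

const werelogs = require('werelogs');
const { models, metrics } = require('arsenal');

const log = new werelogs.Logger('example');
// exercise two of the newly exported classes
const arnRes = models.ARN.createFromString('arn:aws:iam::123456789012:root');
const redis = new metrics.RedisClient({ host: '127.0.0.1', port: 6379 }, log);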
@@ -49,6 +49,14 @@ class AuthInfo {
    isRequesterPublicUser() {
        return this.canonicalID === constants.publicId;
    }
    isRequesterAServiceAccount() {
        return this.canonicalID.startsWith(
            `${constants.zenkoServiceAccount}/`);
    }
    isRequesterThisServiceAccount(serviceName) {
        return this.canonicalID ===
            `${constants.zenkoServiceAccount}/${serviceName}`;
    }
}

module.exports = AuthInfo;
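The new predicates key off the zenkoServiceAccount canonical-ID prefix added in the constants hunk further down. A minimal sketch (the canonical ID below is illustrative; AuthInfo is exported by the auth module, as a later hunk shows):

const { auth } = require('arsenal');

// AuthInfo wraps the identity object returned by Vault;
// canonicalID drives the service-account checks
const info = new auth.AuthInfo({
    canonicalID: 'http://acs.zenko.io/accounts/service/replication',
});
info.isRequesterAServiceAccount();                 // true
info.isRequesterThisServiceAccount('replication'); // true
info.isRequesterThisServiceAccount('lifecycle');   // false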
@@ -12,6 +12,7 @@ const convertUTCtoISO8601 = require('./v4/timeUtils').convertUTCtoISO8601;
const vaultUtilities = require('./in_memory/vaultUtilities');
const backend = require('./in_memory/Backend');
const validateAuthConfig = require('./in_memory/validateAuthConfig');
const AuthLoader = require('./in_memory/AuthLoader');
const Vault = require('./Vault');

let vault = null;
@@ -152,10 +153,11 @@ function doAuth(request, log, cb, awsService, requestContexts) {
 * @param {string} accessKey - the accessKey
 * @param {string} secretKeyValue - the secretKey
 * @param {string} awsService - Aws service related
 * @param {string} [proxyPath] - path that gets proxied by reverse proxy
 * @return {undefined}
 */
function generateV4Headers(request, data, accessKey, secretKeyValue,
    awsService) {
    awsService, proxyPath) {
    Object.assign(request, { headers: {} });
    const amzDate = convertUTCtoISO8601(Date.now());
    // get date without time
@@ -187,7 +189,7 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
    ).sort().join(';');
    const params = { request, signedHeaders, payloadChecksum,
        credentialScope, timestamp, query: data,
        awsService: service };
        awsService: service, proxyPath };
    const stringToSign = constructStringToSignV4(params);
    const signingKey = vaultUtilities.calculateSigningKey(secretKeyValue,
        region,
@@ -214,6 +216,7 @@ module.exports = {
    inMemory: {
        backend,
        validateAuthConfig,
        AuthLoader,
    },
    AuthInfo,
    Vault,
@@ -0,0 +1,223 @@
const fs = require('fs');
const glob = require('simple-glob');
const joi = require('joi');
const werelogs = require('werelogs');

const ARN = require('../../models/ARN');

/**
 * Load authentication information from files or pre-loaded account
 * objects
 *
 * @class AuthLoader
 */
class AuthLoader {
    constructor(logApi) {
        this._log = new (logApi || werelogs).Logger('S3');
        this._authData = { accounts: [] };
        // null: unknown validity, true/false: valid or invalid
        this._isValid = null;

        this._joiKeysValidator = joi.array()
            .items({
                access: joi.string().required(),
                secret: joi.string().required(),
            })
            .required();

        const accountsJoi = joi.array()
            .items({
                name: joi.string().required(),
                email: joi.string().email().required(),
                arn: joi.string().required(),
                canonicalID: joi.string().required(),
                shortid: joi.string().regex(/^[0-9]{12}$/).required(),
                keys: this._joiKeysValidator,
                // backward-compat
                users: joi.array(),
            })
            .required()
            .unique('arn')
            .unique('email')
            .unique('canonicalID');
        this._joiValidator = joi.object({ accounts: accountsJoi });
    }

    /**
     * add one or more accounts to the authentication info
     *
     * @param {object} authData - authentication data
     * @param {object[]} authData.accounts - array of account data
     * @param {string} authData.accounts[].name - account name
     * @param {string} authData.accounts[].email - email address
     * @param {string} authData.accounts[].arn - account ARN,
     * e.g. 'arn:aws:iam::123456789012:root'
     * @param {string} authData.accounts[].canonicalID - account
     * canonical ID
     * @param {string} authData.accounts[].shortid - account ID number,
     * e.g. '123456789012'
     * @param {object[]} authData.accounts[].keys - array of
     * access/secret keys
     * @param {object[]} authData.accounts[].keys[].access - access key
     * @param {object[]} authData.accounts[].keys[].secret - secret key
     * @param {string} [filePath] - optional file path info for
     * logging purposes
     * @return {undefined}
     */
    addAccounts(authData, filePath) {
        const isValid = this._validateData(authData, filePath);
        if (isValid) {
            this._authData.accounts =
                this._authData.accounts.concat(authData.accounts);
            // defer validity checking until the data is fetched, to
            // avoid logging the errors multiple times (all accounts
            // must be validated at once to detect duplicate values)
            if (this._isValid) {
                this._isValid = null;
            }
        } else {
            this._isValid = false;
        }
    }

    /**
     * add account information from a file
     *
     * @param {string} filePath - file path containing JSON
     * authentication info (see {@link addAccounts()} for format)
     * @return {undefined}
     */
    addFile(filePath) {
        const authData = JSON.parse(fs.readFileSync(filePath));
        this.addAccounts(authData, filePath);
    }

    /**
     * add account information from a filesystem path
     *
     * @param {string|string[]} globPattern - filesystem glob pattern,
     * can be a single string or an array of glob patterns. Globs
     * can be simple file paths or can contain glob matching
     * characters, like '/a/b/*.json'. The matching files are
     * individually loaded as JSON and accounts are added. See
     * {@link addAccounts()} for JSON format.
     * @return {undefined}
     */
    addFilesByGlob(globPattern) {
        const files = glob(globPattern);
        files.forEach(filePath => this.addFile(filePath));
    }

    /**
     * perform validation on authentication info previously
     * loaded. Note that it has to be done on the entire set after an
     * update to catch duplicate account IDs or access keys.
     *
     * @return {boolean} true if authentication info is valid,
     * false otherwise
     */
    validate() {
        if (this._isValid === null) {
            this._isValid = this._validateData(this._authData);
        }
        return this._isValid;
    }

    /**
     * get authentication info as a plain JS object containing all accounts
     * under the "accounts" attribute, with validation.
     *
     * @return {object|null} the validated authentication data,
     * null if invalid
     */
    getData() {
        return this.validate() ? this._authData : null;
    }

    _validateData(authData, filePath) {
        const res = joi.validate(authData, this._joiValidator,
            { abortEarly: false });
        if (res.error) {
            this._dumpJoiErrors(res.error.details, filePath);
            return false;
        }
        let allKeys = [];
        let arnError = false;
        const validatedAuth = res.value;
        validatedAuth.accounts.forEach(account => {
            // backward-compat: ignore arn if it starts with 'aws:'
            // and log a warning
            if (account.arn.startsWith('aws:')) {
                this._log.error(
                    'account must have a valid AWS ARN, legacy examples ' +
                    'starting with \'aws:\' are not supported anymore. ' +
                    'Please convert to a proper account entry (see ' +
                    'examples at https://github.com/scality/S3/blob/' +
                    'master/conf/authdata.json). Also note that support ' +
                    'for account users has been dropped.',
                    { accountName: account.name, accountArn: account.arn,
                      filePath });
                arnError = true;
                return;
            }
            if (account.users) {
                this._log.error(
                    'support for account users has been dropped, consider ' +
                    'turning users into account entries (see examples at ' +
                    'https://github.com/scality/S3/blob/master/conf/' +
                    'authdata.json)',
                    { accountName: account.name, accountArn: account.arn,
                      filePath });
                arnError = true;
                return;
            }
            const arnObj = ARN.createFromString(account.arn);
            if (arnObj.error) {
                this._log.error(
                    'authentication config validation error',
                    { reason: arnObj.error.description,
                      accountName: account.name, accountArn: account.arn,
                      filePath });
                arnError = true;
                return;
            }
            if (!arnObj.isIAMAccount()) {
                this._log.error(
                    'authentication config validation error',
                    { reason: 'not an IAM account ARN',
                      accountName: account.name, accountArn: account.arn,
                      filePath });
                arnError = true;
                return;
            }
            allKeys = allKeys.concat(account.keys);
        });
        if (arnError) {
            return false;
        }
        const uniqueKeysRes = joi.validate(
            allKeys, this._joiKeysValidator.unique('access'));
        if (uniqueKeysRes.error) {
            this._dumpJoiErrors(uniqueKeysRes.error.details, filePath);
            return false;
        }
        return true;
    }

    _dumpJoiErrors(errors, filePath) {
        errors.forEach(err => {
            const logInfo = { item: err.path, filePath };
            if (err.type === 'array.unique') {
                logInfo.reason = `duplicate value '${err.context.path}'`;
                logInfo.dupValue = err.context.value[err.context.path];
            } else {
                logInfo.reason = err.message;
                logInfo.context = err.context;
            }
            this._log.error('authentication config validation error',
                logInfo);
        });
    }
}

module.exports = AuthLoader;
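A minimal usage sketch for the new loader (the glob path is hypothetical; the JSON format is the one documented on addAccounts() above):

const AuthLoader = require('arsenal').auth.inMemory.AuthLoader;

const loader = new AuthLoader();
// each matching file is parsed as JSON and its accounts are added
loader.addFilesByGlob('/etc/scality/authdata/*.json'); // hypothetical path
if (!loader.validate()) {
    throw new Error('invalid authentication configuration');
}
const authData = loader.getData(); // { accounts: [...] }, or null if invalid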
@@ -7,10 +7,6 @@ const calculateSigningKey = require('./vaultUtilities').calculateSigningKey;
const hashSignature = require('./vaultUtilities').hashSignature;
const Indexer = require('./Indexer');

function _buildArn(service, generalResource, specificResource) {
    return `arn:aws:${service}:::${generalResource}/${specificResource}`;
}

function _formatResponse(userInfoToSend) {
    return {
        message: {

@@ -42,7 +38,7 @@ class Backend {
    /** verifySignatureV2
     * @param {string} stringToSign - string to sign built per AWS rules
     * @param {string} signatureFromRequest - signature sent with request
     * @param {string} accessKey - user's accessKey
     * @param {string} accessKey - account accessKey
     * @param {object} options - contains algorithm (SHA1 or SHA256)
     * @param {function} callback - callback with either error or user info
     * @return {function} calls callback

@@ -73,7 +69,7 @@ class Backend {
    /** verifySignatureV4
     * @param {string} stringToSign - string to sign built per AWS rules
     * @param {string} signatureFromRequest - signature sent with request
     * @param {string} accessKey - user's accessKey
     * @param {string} accessKey - account accessKey
     * @param {string} region - region specified in request credential
     * @param {string} scopeDate - date specified in request credential
     * @param {object} options - options to send to Vault

@@ -161,55 +157,6 @@ class Backend {
        };
        return cb(null, vaultReturnObject);
    }

    /**
     * Mocks Vault's response to a policy evaluation request.
     * Since policies are not actually implemented in the memory backend,
     * we allow users to proceed with the request.
     * @param {object} requestContextParams - parameters needed to construct
     * requestContext in Vault
     * @param {object} requestContextParams.constantParams - params that have
     * the same value for each requestContext to be constructed in Vault
     * @param {object} requestContextParams.parameterize - params that have
     * arrays as values since a requestContext needs to be constructed with
     * each option in Vault
     * @param {object[]} requestContextParams.parameterize.specificResource -
     * specific resources parameterized as an array of objects containing
     * properties `key` and optional `versionId`
     * @param {string} userArn - arn of requesting user
     * @param {object} log - log object
     * @param {function} cb - callback with either error or an array
     * of authorization results
     * @returns {undefined}
     * @callback called with (err, vaultReturnObject)
     */
    checkPolicies(requestContextParams, userArn, log, cb) {
        let results;
        const parameterizeParams = requestContextParams.parameterize;
        if (parameterizeParams && parameterizeParams.specificResource) {
            // object is parameterized
            results = parameterizeParams.specificResource.map(obj => ({
                isAllowed: true,
                arn: _buildArn(this.service, requestContextParams
                    .constantParams.generalResource, obj.key),
                versionId: obj.versionId,
            }));
        } else {
            results = [{
                isAllowed: true,
                arn: _buildArn(this.service, requestContextParams
                    .constantParams.generalResource, requestContextParams
                    .constantParams.specificResource),
            }];
        }
        const vaultReturnObject = {
            message: {
                body: results,
            },
        };
        return cb(null, vaultReturnObject);
    }
}

@@ -226,9 +173,6 @@ class S3AuthBackend extends Backend {
     * @param {object[]=} authdata.accounts[].keys - array of key objects
     * @param {string} authdata.accounts[].keys[].access - access key
     * @param {string} authdata.accounts[].keys[].secret - secret key
     * @param {object[]=} authdata.accounts[].users - array of user objects:
     * note, same properties as account except no canonical ID / sas token
     * @param {string=} authdata.accounts[].sasToken - Azure SAS token
     * @return {undefined}
     */
    constructor(authdata) {
@@ -19,9 +19,6 @@ class Indexer {
     * @param {object[]=} authdata.accounts[].keys - array of key objects
     * @param {string} authdata.accounts[].keys[].access - access key
     * @param {string} authdata.accounts[].keys[].secret - secret key
     * @param {object[]=} authdata.accounts[].users - array of user objects:
     * note, same properties as account except no canonical ID / sas token
     * @param {string=} authdata.accounts[].sasToken - Azure SAS token
     * @return {undefined}
     */
    constructor(authdata) {

@@ -30,10 +27,6 @@ class Indexer {
            accessKey: {},
            email: {},
        };
        this.usersBy = {
            accessKey: {},
            email: {},
        };

        /*
         * This may happen if the application is configured to use another

@@ -47,23 +40,6 @@ class Indexer {
        this._build(authdata);
    }

    _indexUser(account, user) {
        const userData = {
            arn: account.arn,
            canonicalID: account.canonicalID,
            shortid: account.shortid,
            accountDisplayName: account.accountDisplayName,
            IAMdisplayName: user.name,
            email: user.email.toLowerCase(),
            keys: [],
        };
        this.usersBy.email[userData.email] = userData;
        user.keys.forEach(key => {
            userData.keys.push(key);
            this.usersBy.accessKey[key.access] = userData;
        });
    }

    _indexAccount(account) {
        const accountData = {
            arn: account.arn,

@@ -81,11 +57,6 @@ class Indexer {
                this.accountsBy.accessKey[key.access] = accountData;
            });
        }
        if (account.users !== undefined) {
            account.users.forEach(user => {
                this._indexUser(accountData, user);
            });
        }
    }

    _build(authdata) {

@@ -126,11 +97,8 @@ class Indexer {
     * @return {Object} entity.email - The entity's lowercased email
     */
    getEntityByKey(key) {
        if (this.accountsBy.accessKey.hasOwnProperty(key)) {
            return this.accountsBy.accessKey[key];
        }
        return this.usersBy.accessKey[key];
    }

    /**
     * This method returns the entity (either an account or a user) associated

@@ -150,9 +118,6 @@ class Indexer {
     */
    getEntityByEmail(email) {
        const lowerCasedEmail = email.toLowerCase();
        if (this.usersBy.email.hasOwnProperty(lowerCasedEmail)) {
            return this.usersBy.email[lowerCasedEmail];
        }
        return this.accountsBy.email[lowerCasedEmail];
    }
@@ -1,194 +1,18 @@
const werelogs = require('werelogs');

function _incr(count) {
    if (count !== undefined) {
        return count + 1;
    }
    return 1;
}
const AuthLoader = require('./AuthLoader');

/**
 * This function ensures that the field `name` inside `container` is of the
 * expected `type` inside `obj`. If any error is found, an entry is added into
 * the error collector object.
 * @deprecated please use {@link AuthLoader} class instead
 *
 * @param {object} data - the error collector object
 * @param {string} container - the name of the entity that contains
 * what we're checking
 * @param {string} name - the name of the entity we're checking for
 * @param {string} type - expected typename of the entity we're checking
 * @param {object} obj - the object we're checking the fields of
 * @return {boolean} true if the type is Ok and no error found,
 * false if an error was found and reported
 */
function _checkType(data, container, name, type, obj) {
    if ((type === 'array' && !Array.isArray(obj[name]))
        || (type !== 'array' && typeof obj[name] !== type)) {
        data.errors.push({
            txt: 'property is not of the expected type',
            obj: {
                entity: container,
                property: name,
                type: typeof obj[name],
                expectedType: type,
            },
        });
        return false;
    }
    return true;
}

/**
 * This function ensures that the field `name` exists inside `obj` and has
 * the expected `type`; `container` names the enclosing entity for error
 * reporting. If any error is found, an entry is added into the error
 * collector object.
 *
 * @param {object} data - the error collector object
 * @param {string} container - the name of the entity that contains
 * what we're checking
 * @param {string} name - the name of the entity we're checking for
 * @param {string} type - expected typename of the entity we're checking
 * @param {object} obj - the object we're checking the fields of
 * @return {boolean} true if the field exists and type is Ok,
 * false if an error was found and reported
 */
function _checkExists(data, container, name, type, obj) {
    if (obj[name] === undefined) {
        data.errors.push({
            txt: 'missing property in auth entity',
            obj: {
                entity: container,
                property: name,
            },
        });
        return false;
    }
    return _checkType(data, container, name, type, obj);
}

function _checkUser(data, userObj) {
    if (_checkExists(data, 'User', 'arn', 'string', userObj)) {
        // eslint-disable-next-line no-param-reassign
        data.arns[userObj.arn] = _incr(data.arns[userObj.arn]);
    }
    if (_checkExists(data, 'User', 'email', 'string', userObj)) {
        // eslint-disable-next-line no-param-reassign
        data.emails[userObj.email] = _incr(data.emails[userObj.email]);
    }
    if (_checkExists(data, 'User', 'keys', 'array', userObj)) {
        userObj.keys.forEach(keyObj => {
            // eslint-disable-next-line no-param-reassign
            data.keys[keyObj.access] = _incr(data.keys[keyObj.access]);
        });
    }
}

function _checkAccount(data, accountObj, checkSas) {
    if (_checkExists(data, 'Account', 'email', 'string', accountObj)) {
        // eslint-disable-next-line no-param-reassign
        data.emails[accountObj.email] = _incr(data.emails[accountObj.email]);
    }
    if (_checkExists(data, 'Account', 'arn', 'string', accountObj)) {
        // eslint-disable-next-line no-param-reassign
        data.arns[accountObj.arn] = _incr(data.arns[accountObj.arn]);
    }
    if (_checkExists(data, 'Account', 'canonicalID', 'string', accountObj)) {
        // eslint-disable-next-line no-param-reassign
        data.canonicalIds[accountObj.canonicalID] =
            _incr(data.canonicalIds[accountObj.canonicalID]);
    }
    if (checkSas &&
        _checkExists(data, 'Account', 'sasToken', 'string', accountObj)) {
        // eslint-disable-next-line no-param-reassign
        data.sasTokens[accountObj.sasToken] =
            _incr(data.sasTokens[accountObj.sasToken]);
    }

    if (accountObj.users) {
        if (_checkType(data, 'Account', 'users', 'array', accountObj)) {
            accountObj.users.forEach(userObj => _checkUser(data, userObj));
        }
    }

    if (accountObj.keys) {
        if (_checkType(data, 'Account', 'keys', 'array', accountObj)) {
            accountObj.keys.forEach(keyObj => {
                // eslint-disable-next-line no-param-reassign
                data.keys[keyObj.access] = _incr(data.keys[keyObj.access]);
            });
        }
    }
}

function _dumpCountError(property, obj, log) {
    let count = 0;
    Object.keys(obj).forEach(key => {
        if (obj[key] > 1) {
            log.error('property should be unique', {
                property,
                value: key,
                count: obj[key],
            });
            ++count;
        }
    });
    return count;
}

function _dumpErrors(checkData, log) {
    let nerr = _dumpCountError('CanonicalID', checkData.canonicalIds, log);
    nerr += _dumpCountError('Email', checkData.emails, log);
    nerr += _dumpCountError('ARN', checkData.arns, log);
    nerr += _dumpCountError('AccessKey', checkData.keys, log);
    nerr += _dumpCountError('SAS Token', checkData.sasTokens, log);

    if (checkData.errors.length > 0) {
        checkData.errors.forEach(msg => {
            log.error(msg.txt, msg.obj);
        });
    }

    if (checkData.errors.length === 0 && nerr === 0) {
        return false;
    }

    log.fatal('invalid authentication config file (cannot start)');

    return true;
}

/**
 * @param {object} authdata - the authentication config file's data
 * @param {werelogs.API} logApi - object providing a constructor function
 * for the Logger object
 * @param {(boolean|null)} checkSas - whether to check Azure SAS for each
 * account
 * @return {boolean} true on erroneous data,
 * false on success
 */
function validateAuthConfig(authdata, logApi, checkSas) {
    const checkData = {
        errors: [],
        emails: [],
        arns: [],
        canonicalIds: [],
        keys: [],
        sasTokens: [],
    };
    const log = new (logApi || werelogs).Logger('S3');

    if (authdata.accounts === undefined) {
        checkData.errors.push({
            txt: 'no "accounts" array defined in Auth config',
        });
        return _dumpErrors(checkData, log);
    }

    authdata.accounts.forEach(account => {
        _checkAccount(checkData, account, checkSas);
    });

    return _dumpErrors(checkData, log);
function validateAuthConfig(authdata, logApi) {
    const authLoader = new AuthLoader(logApi);
    authLoader.addAccounts(authdata);
    return !authLoader.validate();
}

module.exports = validateAuthConfig;
@@ -27,7 +27,7 @@ function check(request, log, data) {
       milliseconds to compare to Date.now()
    */
    const expirationTime = parseInt(data.Expires, 10) * 1000;
    if (isNaN(expirationTime)) {
    if (Number.isNaN(expirationTime)) {
        log.debug('invalid expires parameter',
            { expires: data.Expires });
        return { err: errors.MissingSecurityHeader };
@@ -10,17 +10,13 @@ const createCanonicalRequest = require('./createCanonicalRequest');
 * @returns {string} - stringToSign
 */
function constructStringToSign(params) {
    const request = params.request;
    const signedHeaders = params.signedHeaders;
    const payloadChecksum = params.payloadChecksum;
    const credentialScope = params.credentialScope;
    const timestamp = params.timestamp;
    const query = params.query;
    const log = params.log;
    const { request, signedHeaders, payloadChecksum, credentialScope, timestamp,
        query, log, proxyPath } = params;
    const path = proxyPath || request.path;

    const canonicalReqResult = createCanonicalRequest({
        pHttpVerb: request.method,
        pResource: request.path,
        pResource: path,
        pQuery: query,
        pHeaders: request.headers,
        pSignedHeaders: signedHeaders,
@@ -40,7 +40,8 @@ function createCanonicalRequest(params) {

    // canonical query string
    let canonicalQueryStr = '';
    if (pQuery && !((service === 'iam' || service === 'ring') &&
    if (pQuery && !((service === 'iam' || service === 'ring' ||
        service === 'sts') &&
        pHttpVerb === 'POST')) {
        const sortedQueryParams = Object.keys(pQuery).sort().map(key => {
            const encodedKey = awsURIencode(key);
@@ -41,8 +41,9 @@ function validateCredentials(credentials, timestamp, log) {
            { scopeDate, timestampDate });
        return errors.RequestTimeTooSkewed;
    }
    if (service !== 's3' && service !== 'iam' && service !== 'ring') {
        log.warn('service in credentials is not one of s3/iam/ring', {
    if (service !== 's3' && service !== 'iam' && service !== 'ring' &&
        service !== 'sts') {
        log.warn('service in credentials is not one of s3/iam/ring/sts', {
            service,
        });
        return errors.InvalidArgument;
@@ -20,6 +20,7 @@ module.exports = {
    // no authentication information. Requestor can access
    // only public resources
    publicId: 'http://acs.amazonaws.com/groups/global/AllUsers',
    zenkoServiceAccount: 'http://acs.zenko.io/accounts/service',
    metadataFileNamespace: '/MDFile',
    dataFileURL: '/DataFile',
    // AWS states max size for user-defined metadata
@@ -29,4 +30,41 @@ module.exports = {
    // so we do the same.
    maximumMetaHeadersSize: 2136,
    emptyFileMd5: 'd41d8cd98f00b204e9800998ecf8427e',
    // Version 2 changes the format of the data location property
    // Version 3 adds the dataStoreName attribute
    mdModelVersion: 3,
    /*
     * Splitter is used to build the object name for the overview of a
     * multipart upload and to build the object names for each part of a
     * multipart upload. These objects with large names are then stored in
     * metadata in a "shadow bucket" associated with a real bucket. The
     * shadow bucket contains all ongoing multipart uploads. We include in
     * the object name some of the info we might need to pull about an open
     * multipart upload or about an individual part, with each piece of info
     * separated by the splitter. We can then extract each piece of info by
     * splitting the object name string with this splitter.
     * For instance, assuming a splitter of '...!*!',
     * the name of the upload overview would be:
     * overview...!*!objectKey...!*!uploadId
     * For instance, the name of a part would be:
     * uploadId...!*!partNumber
     *
     * The sequence of characters used in the splitter should not occur
     * elsewhere in the pieces of info to avoid splitting where not
     * intended.
     *
     * Splitter is also used in adding bucketnames to the
     * namespaceusersbucket. The object names added to the
     * namespaceusersbucket are of the form:
     * canonicalID...!*!bucketname
     */

    splitter: '..|..',
    usersBucket: 'users..bucket',
    // MPU Bucket Prefix is used to create the name of the shadow
    // bucket used for multipart uploads. There is one shadow mpu
    // bucket per bucket and its name is the mpuBucketPrefix followed
    // by the name of the final destination bucket for the object
    // once the multipart upload is complete.
    mpuBucketPrefix: 'mpuShadowBucket',
};
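A short sketch of the naming scheme described in the comment above (the object key and upload ID are made up; the constants export path is an assumption based on the index.js hunks):

const { splitter } = require('arsenal').constants; // '..|..'

const objectKey = 'photos/cat.jpg';
const uploadId = '8a5f2c1e0b4d4e8a9c3f';

// overview key stored in the shadow bucket for an ongoing MPU
const overviewKey = `overview${splitter}${objectKey}${splitter}${uploadId}`;
// key for an individual part of that upload
const partKey = `${uploadId}${splitter}2`;

// each piece of info can be recovered by splitting on the same sequence
const [, key, id] = overviewKey.split(splitter);
// key === 'photos/cat.jpg', id === uploadId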
@@ -0,0 +1,54 @@
const Redis = require('ioredis');

class RedisClient {
    /**
     * @constructor
     * @param {Object} config - config
     * @param {string} config.host - Redis host
     * @param {number} config.port - Redis port
     * @param {string} config.password - Redis password
     * @param {werelogs.Logger} logger - logger instance
     */
    constructor(config, logger) {
        this._client = new Redis(config);
        this._client.on('error', err =>
            logger.trace('error from redis', {
                error: err,
                method: 'RedisClient.constructor',
                redisHost: config.host,
                redisPort: config.port,
            })
        );
        return this;
    }

    /**
     * increment value of a key by 1 and set a ttl
     * @param {string} key - key holding the value
     * @param {number} expiry - expiry in seconds
     * @param {callback} cb - callback
     * @return {undefined}
     */
    incrEx(key, expiry, cb) {
        return this._client
            .multi([['incr', key], ['expire', key, expiry]])
            .exec(cb);
    }

    /**
     * execute a batch of commands
     * @param {string[]} cmds - list of commands
     * @param {callback} cb - callback
     * @return {undefined}
     */
    batch(cmds, cb) {
        return this._client.pipeline(cmds).exec(cb);
    }

    clear(cb) {
        return this._client.flushdb(cb);
    }
}

module.exports = RedisClient;
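A minimal usage sketch for the new client (the host, port, and key name are illustrative):

const werelogs = require('werelogs');
const RedisClient = require('arsenal').metrics.RedisClient;

const log = new werelogs.Logger('example');
const redis = new RedisClient({ host: '127.0.0.1', port: 6379 }, log);

// bump a counter and give it a 15-minute TTL in a single MULTI
redis.incrEx('s3:requests:1510000000000', 900, (err, res) => {
    if (err) {
        return log.error('incrEx failed', { error: err });
    }
    return log.info('incremented', { result: res });
});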
@@ -0,0 +1,150 @@
const async = require('async');

class StatsClient {
    /**
     * @constructor
     * @param {object} redisClient - RedisClient instance
     * @param {number} interval - sampling interval in seconds
     * @param {number} expiry - sampling duration in seconds
     */
    constructor(redisClient, interval, expiry) {
        this._redis = redisClient;
        this._interval = interval;
        this._expiry = expiry;
        return this;
    }

    /*
     * Utility function to use when callback is undefined
     */
    _noop() {}

    /**
     * normalize to the nearest interval
     * @param {object} d - Date instance
     * @return {number} timestamp - normalized to the nearest interval
     */
    _normalizeTimestamp(d) {
        const s = d.getSeconds();
        return d.setSeconds(s - s % this._interval, 0);
    }

    /**
     * set timestamp to the previous interval
     * @param {object} d - Date instance
     * @return {number} timestamp - set to the previous interval
     */
    _setPrevInterval(d) {
        return d.setSeconds(d.getSeconds() - this._interval);
    }

    /**
     * build redis key to get total number of occurrences on the server
     * @param {string} name - key name identifier
     * @param {object} d - Date instance
     * @return {string} key - key for redis
     */
    _buildKey(name, d) {
        return `${name}:${this._normalizeTimestamp(d)}`;
    }

    /**
     * reduce the array of values to a single value
     * typical input looks like [[null, '1'], [null, '2'], [null, null]...]
     * @param {array} arr - array of [err, value] tuples returned by a
     * batch of redis commands
     * @return {number} sum of the parsed values
     */
    _getCount(arr) {
        return arr.reduce((prev, a) => {
            let num = parseInt(a[1], 10);
            num = Number.isNaN(num) ? 0 : num;
            return prev + num;
        }, 0);
    }

    /**
     * report/record a new request received on the server
     * @param {string} id - service identifier
     * @param {callback} cb - callback
     * @return {undefined}
     */
    reportNewRequest(id, cb) {
        if (!this._redis) {
            return undefined;
        }
        const callback = cb || this._noop;
        const key = this._buildKey(`${id}:requests`, new Date());
        return this._redis.incrEx(key, this._expiry, callback);
    }

    /**
     * report/record a request that ended up being a 500 on the server
     * @param {string} id - service identifier
     * @param {callback} cb - callback
     * @return {undefined}
     */
    report500(id, cb) {
        if (!this._redis) {
            return undefined;
        }
        const callback = cb || this._noop;
        const key = this._buildKey(`${id}:500s`, new Date());
        return this._redis.incrEx(key, this._expiry, callback);
    }

    /**
     * get stats for the last x seconds, x being the sampling duration
     * @param {object} log - Werelogs request logger
     * @param {string} id - service identifier
     * @param {callback} cb - callback to call with the err/result
     * @return {undefined}
     */
    getStats(log, id, cb) {
        if (!this._redis) {
            return cb(null, {});
        }
        const d = new Date();
        const totalKeys = Math.floor(this._expiry / this._interval);
        const reqsKeys = [];
        const req500sKeys = [];
        for (let i = 0; i < totalKeys; i++) {
            reqsKeys.push(['get', this._buildKey(`${id}:requests`, d)]);
            req500sKeys.push(['get', this._buildKey(`${id}:500s`, d)]);
            this._setPrevInterval(d);
        }
        return async.parallel([
            next => this._redis.batch(reqsKeys, next),
            next => this._redis.batch(req500sKeys, next),
        ], (err, results) => {
            /**
             * Batch result is of the format
             * [ [null, '1'], [null, '2'], [null, '3'] ] where each
             * item is the result of each batch command.
             * For each item in the result, index 0 holds the error and
             * index 1 holds the value.
             */
            const statsRes = {
                'requests': 0,
                '500s': 0,
                'sampleDuration': this._expiry,
            };
            if (err) {
                log.error('error getting stats', {
                    error: err,
                    method: 'StatsClient.getStats',
                });
                /**
                 * Redis for stats is not a critical component; we
                 * ignore any error here, as returning an InternalError
                 * could be mistaken for a problem with the health of
                 * the service itself.
                 */
                return cb(null, statsRes);
            }
            statsRes.requests = this._getCount(results[0]);
            statsRes['500s'] = this._getCount(results[1]);
            return cb(null, statsRes);
        });
    }
}

module.exports = StatsClient;
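A sketch tying the two metrics classes together (the interval and expiry values are illustrative, and real callers would wait for the report callbacks before reading stats):

const werelogs = require('werelogs');
const { RedisClient, StatsClient } = require('arsenal').metrics;

const log = new werelogs.Logger('example');
const redis = new RedisClient({ host: '127.0.0.1', port: 6379 }, log);
// sample in 5-minute buckets, keep 30 minutes of history
const stats = new StatsClient(redis, 300, 1800);

stats.reportNewRequest('s3');
stats.report500('s3');
stats.getStats(log, 's3', (err, res) => {
    // res looks like { requests: 1, '500s': 1, sampleDuration: 1800 }
    log.info('stats', { res });
});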
@@ -0,0 +1,106 @@
const errors = require('../errors');

const validServices = {
    aws: ['s3', 'iam', 'sts', 'ring'],
    scality: ['utapi', 'sso'],
};

class ARN {
    /**
     * Create an ARN object from its individual components
     *
     * @constructor
     * @param {string} partition - ARN partition (e.g. 'aws')
     * @param {string} service - service name in partition (e.g. 's3')
     * @param {string} [region] - AWS region
     * @param {string} [accountId] - AWS 12-digit account ID
     * @param {string} resource - AWS resource path (e.g. 'foo/bar')
     */
    constructor(partition, service, region, accountId, resource) {
        this._partition = partition;
        this._service = service;
        this._region = region || null;
        this._accountId = accountId || null;
        this._resource = resource;
    }

    static createFromString(arnStr) {
        const [arn, partition, service, region, accountId,
            resourceType, resource] = arnStr.split(':');

        if (arn !== 'arn') {
            return { error: errors.InvalidArgument.customizeDescription(
                'bad ARN: must start with "arn:"') };
        }
        if (!partition) {
            return { error: errors.InvalidArgument.customizeDescription(
                'bad ARN: must include a partition name, like "aws" in ' +
                '"arn:aws:..."') };
        }
        if (!service) {
            return { error: errors.InvalidArgument.customizeDescription(
                'bad ARN: must include a service name, like "s3" in ' +
                '"arn:aws:s3:..."') };
        }
        if (validServices[partition] === undefined) {
            return { error: errors.InvalidArgument.customizeDescription(
                `bad ARN: unknown partition "${partition}", should be a ` +
                'valid partition name like "aws" in "arn:aws:..."') };
        }
        if (!validServices[partition].includes(service)) {
            return { error: errors.InvalidArgument.customizeDescription(
                `bad ARN: unsupported ${partition} service "${service}"`) };
        }
        if (accountId && !/^([0-9]{12}|[*])$/.test(accountId)) {
            return { error: errors.InvalidArgument.customizeDescription(
                `bad ARN: bad account ID "${accountId}": ` +
                'must be a 12-digit number or "*"') };
        }
        const fullResource = (resource !== undefined ?
            `${resourceType}:${resource}` : resourceType);
        return new ARN(partition, service, region, accountId, fullResource);
    }

    getPartition() {
        return this._partition;
    }
    getService() {
        return this._service;
    }
    getRegion() {
        return this._region;
    }
    getAccountId() {
        return this._accountId;
    }
    getResource() {
        return this._resource;
    }

    isIAMAccount() {
        return this.getService() === 'iam'
            && this.getAccountId() !== null
            && this.getAccountId() !== '*'
            && this.getResource() === 'root';
    }
    isIAMUser() {
        return this.getService() === 'iam'
            && this.getAccountId() !== null
            && this.getAccountId() !== '*'
            && this.getResource().startsWith('user/');
    }
    isIAMRole() {
        return this.getService() === 'iam'
            && this.getAccountId() !== null
            && this.getResource().startsWith('role');
    }

    toString() {
        return ['arn', this.getPartition(), this.getService(),
            this.getRegion(), this.getAccountId(), this.getResource()]
            .join(':');
    }
}

module.exports = ARN;
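A quick sketch of the parser's two result shapes (success returns an ARN instance, failure an { error } object):

const ARN = require('arsenal').models.ARN;

const ok = ARN.createFromString('arn:aws:iam::123456789012:root');
// ok is an ARN instance; ok.isIAMAccount() === true
// 'iam::' leaves the region component empty, so ok.getRegion() is null

const bad = ARN.createFromString('arn:foo:s3:::mybucket');
// bad.error is an InvalidArgument describing the unknown partition "foo"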
@@ -1,23 +1,66 @@
const constants = require('../constants');
const VersionIDUtils = require('../versioning/VersionID');

// Version 2 changes the format of the data location property
// Version 3 adds the dataStoreName attribute
const modelVersion = 3;
const ObjectMDLocation = require('./ObjectMDLocation');

/**
 * Class to manage metadata object for regular s3 objects (instead of
 * mpuPart metadata for example)
 */
module.exports = class ObjectMD {
class ObjectMD {

    /**
     * @constructor
     * Create a new instance of ObjectMD. Parameter <tt>objMd</tt> is
     * reserved for internal use; users should call
     * {@link ObjectMD.createFromBlob()} to load from a stored
     * metadata blob and check the returned value for errors.
     *
     * @param {number} version - Version of the metadata model
     * @constructor
     * @param {ObjectMD|object} [objMd] - object metadata source,
     * either an ObjectMD instance or a native JS object parsed from
     * JSON
     */
    constructor(version) {
        const now = new Date().toJSON();
    constructor(objMd = undefined) {
        this._initMd();
        if (objMd !== undefined) {
            if (objMd instanceof ObjectMD) {
                this._updateFromObjectMD(objMd);
            } else {
                this._updateFromParsedJSON(objMd);
            }
        } else {
            // set newly-created object md modified time to current time
            this._data['last-modified'] = new Date().toJSON();
        }
        // set latest md model version now that we ensured
        // backward-compat conversion
        this._data['md-model-version'] = constants.mdModelVersion;
    }

    /**
     * create an ObjectMD instance from stored metadata
     *
     * @param {String|Buffer} storedBlob - serialized metadata blob
     * @return {object} a result object containing either a 'result'
     * property whose value is a new ObjectMD instance on success, or
     * an 'error' property on error
     */
    static createFromBlob(storedBlob) {
        try {
            const objMd = JSON.parse(storedBlob);
            return { result: new ObjectMD(objMd) };
        } catch (err) {
            return { error: err };
        }
    }

    getSerialized() {
        return JSON.stringify(this.getValue());
    }

    _initMd() {
        // initialize md with default values
        this._data = {
            'md-model-version': version || modelVersion,
            'owner-display-name': '',
            'owner-id': '',
            'cache-control': '',

@@ -26,7 +69,6 @@ module.exports = class ObjectMD {
            'expires': '',
            'content-length': 0,
            'content-type': '',
            'last-modified': now,
            'content-md5': '',
            // simple/no version. will expand once object versioning is
            // introduced

@@ -48,7 +90,7 @@ module.exports = class ObjectMD {
                READ_ACP: [],
            },
            'key': '',
            'location': [],
            'location': null,
            'isNull': '',
            'nullVersionId': '',
            'isDeleteMarker': '',

@@ -60,18 +102,37 @@ module.exports = class ObjectMD {
                destination: '',
                storageClass: '',
                role: '',
                storageType: '',
                dataStoreVersionId: '',
            },
            'dataStoreName': '',
        };
    }

    /**
     * Returns metadata model version
     *
     * @return {number} Metadata model version
     */
    getModelVersion() {
        return this._data['md-model-version'];
    _updateFromObjectMD(objMd) {
        // We only duplicate selected attributes here, where setters
        // allow changing inner values, and leave the others as shallow
        // copies. Since performance is a concern, we want to avoid
        // the JSON.parse(JSON.stringify()) method.

        Object.assign(this._data, objMd._data);
        Object.assign(this._data.replicationInfo,
            objMd._data.replicationInfo);
    }

    _updateFromParsedJSON(objMd) {
        // objMd is a new JS object created for the purpose; it's safe
        // to just assign its top-level properties.

        Object.assign(this._data, objMd);
        this._convertToLatestModel();
    }

    _convertToLatestModel() {
        // handle backward-compat stuff
        if (typeof(this._data.location) === 'string') {
            this.setLocation([{ key: this._data.location }]);
        }
    }

    /**

@@ -462,21 +523,53 @@ module.exports = class ObjectMD {
    /**
     * Set location
     *
     * @param {string[]} location - location
     * @param {object[]} location - array of data locations (see
     * constructor of {@link ObjectMDLocation} for a description of
     * fields for each array object)
     * @return {ObjectMD} itself
     */
    setLocation(location) {
        if (!Array.isArray(location) || location.length === 0) {
            this._data.location = null;
        } else {
            this._data.location = location;
        }
        return this;
    }

    /**
     * Returns location
     *
     * @return {string[]} location
     * @return {object[]} location
     */
    getLocation() {
        return this._data.location;
        const { location } = this._data;
        return Array.isArray(location) ? location : [];
    }

    // Object metadata may contain multiple elements for a single part if
    // the part was originally copied from another MPU. Here we reduce the
    // locations array to a single element for each part.
    getReducedLocations() {
        const locations = this.getLocation();
        const reducedLocations = [];
        let partTotal = 0;
        for (let i = 0; i < locations.length; i++) {
            const currPart = new ObjectMDLocation(locations[i]);
            const currPartNum = currPart.getPartNumber();
            let nextPartNum = undefined;
            if (i < locations.length - 1) {
                const nextPart = new ObjectMDLocation(locations[i + 1]);
                nextPartNum = nextPart.getPartNumber();
            }
            partTotal += currPart.getPartSize();
            if (currPartNum !== nextPartNum) {
                currPart.setPartSize(partTotal);
                reducedLocations.push(currPart.getValue());
                partTotal = 0;
            }
        }
        return reducedLocations;
    }

    /**

@@ -559,6 +652,16 @@ module.exports = class ObjectMD {
        return this._data.versionId;
    }

    /**
     * Get metadata versionId value in encoded form (the one visible
     * to the S3 API user)
     *
     * @return {string} The encoded object versionId
     */
    getEncodedVersionId() {
        return VersionIDUtils.encode(this.getVersionId());
    }

    /**
     * Set tags
     *

@@ -586,14 +689,16 @@ module.exports = class ObjectMD {
     * @return {ObjectMD} itself
     */
    setReplicationInfo(replicationInfo) {
        const { status, content, destination, storageClass, role } =
            replicationInfo;
        const { status, content, destination, storageClass, role,
            storageType, dataStoreVersionId } = replicationInfo;
        this._data.replicationInfo = {
            status,
            content,
            destination,
            storageClass: storageClass || '',
            role,
            storageType: storageType || '',
            dataStoreVersionId: dataStoreVersionId || '',
        };
        return this;
    }

@@ -607,6 +712,45 @@ module.exports = class ObjectMD {
        return this._data.replicationInfo;
    }

    setReplicationStatus(status) {
        this._data.replicationInfo.status = status;
        return this;
    }

    setReplicationDataStoreVersionId(versionId) {
        this._data.replicationInfo.dataStoreVersionId = versionId;
        return this;
    }

    getReplicationDataStoreVersionId() {
        return this._data.replicationInfo.dataStoreVersionId;
    }

    getReplicationStatus() {
        return this._data.replicationInfo.status;
    }

    getReplicationContent() {
        return this._data.replicationInfo.content;
    }

    getReplicationRoles() {
        return this._data.replicationInfo.role;
    }

    getReplicationStorageType() {
        return this._data.replicationInfo.storageType;
    }

    getReplicationStorageClass() {
        return this._data.replicationInfo.storageClass;
    }

    getReplicationTargetBucket() {
        const destBucketArn = this._data.replicationInfo.destination;
        return destBucketArn.split(':').slice(-1)[0];
    }

    /**
     * Set dataStoreName
     *

@@ -627,6 +771,19 @@ module.exports = class ObjectMD {
        return this._data.dataStoreName;
    }

    /**
     * Get dataStoreVersionId
     *
     * @return {string} external backend version id for data
     */
    getDataStoreVersionId() {
        const location = this.getLocation();
        if (!location[0]) {
            return undefined;
        }
        return location[0].dataStoreVersionId;
    }

    /**
     * Set custom meta headers
     *

@@ -665,4 +822,6 @@ module.exports = class ObjectMD {
    getValue() {
        return this._data;
    }
};
}

module.exports = ObjectMD;
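A short sketch of the new load/serialize round-trip (the blob content is illustrative):

const ObjectMD = require('arsenal').models.ObjectMD;

// load from a stored blob and check for errors, as the constructor doc advises
const { result: md, error } = ObjectMD.createFromBlob(
    '{"content-length":10,"location":"oldStyleKey"}');
if (error) {
    throw error;
}
// the v1-style string location was converted to the array form
md.getLocation();   // [{ key: 'oldStyleKey' }]
md.getSerialized(); // JSON blob stamped with the latest md-model-version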
@@ -0,0 +1,72 @@
/**
 * Helper class to ease access to a single data location in metadata
 * 'location' array
 */
class ObjectMDLocation {

    /**
     * @constructor
     * @param {object} locationObj - single data location info
     * @param {string} locationObj.key - data backend key
     * @param {number} locationObj.start - index of first data byte of
     * this part in the full object
     * @param {number} locationObj.size - byte length of data part
     * @param {string} locationObj.dataStoreName - type of data store
     * @param {string} locationObj.dataStoreETag - internal ETag of
     * data part
     */
    constructor(locationObj) {
        this._data = {
            key: locationObj.key,
            start: locationObj.start,
            size: locationObj.size,
            dataStoreName: locationObj.dataStoreName,
            dataStoreETag: locationObj.dataStoreETag,
        };
    }

    getKey() {
        return this._data.key;
    }

    getDataStoreName() {
        return this._data.dataStoreName;
    }

    setDataLocation(location) {
        this._data.key = location.key;
        this._data.dataStoreName = location.dataStoreName;
        return this;
    }

    getDataStoreETag() {
        return this._data.dataStoreETag;
    }

    getPartNumber() {
        return Number.parseInt(this._data.dataStoreETag.split(':')[0], 10);
    }

    getPartETag() {
        return this._data.dataStoreETag.split(':')[1];
    }

    getPartStart() {
        return this._data.start;
    }

    getPartSize() {
        return this._data.size;
    }

    setPartSize(size) {
        this._data.size = size;
        return this;
    }

    getValue() {
        return this._data;
    }
}

module.exports = ObjectMDLocation;
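The accessors above imply a dataStoreETag of the form '<partNumber>:<etag>'. A small sketch of how getReducedLocations() (in the ObjectMD hunks above) uses ObjectMDLocation to merge split parts (all values are illustrative):

const ObjectMD = require('arsenal').models.ObjectMD;

// part 1 was copied from another MPU, so it spans two location elements
const md = new ObjectMD().setLocation([
    { key: 'k1', start: 0, size: 4, dataStoreName: 'file', dataStoreETag: '1:etagA' },
    { key: 'k2', start: 4, size: 6, dataStoreName: 'file', dataStoreETag: '1:etagA' },
    { key: 'k3', start: 10, size: 5, dataStoreName: 'file', dataStoreETag: '2:etagB' },
]);
md.getReducedLocations();
// => [ { key: 'k2', ..., size: 10, dataStoreETag: '1:etagA' },
//      { key: 'k3', ..., size: 5,  dataStoreETag: '2:etagB' } ]
// each part collapses onto its last element, carrying the summed size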
@ -58,6 +58,8 @@ class ReplicationConfiguration {
        this._role = null;
        this._destination = null;
        this._rules = null;
        this._prevStorageClass = null;
        this._isExternalLocation = null;
    }

    /**

@ -148,11 +150,16 @@ class ReplicationConfiguration {
        }
        const role = parsedRole[0];
        const rolesArr = role.split(',');
        if (rolesArr.length !== 2) {
        if (!this._isExternalLocation && rolesArr.length !== 2) {
            return errors.InvalidArgument.customizeDescription(
                'Invalid Role specified in replication configuration: ' +
                'Role must be a comma-separated list of two IAM roles');
        }
        if (this._isExternalLocation && rolesArr.length > 1) {
            return errors.InvalidArgument.customizeDescription(
                'Invalid Role specified in replication configuration: ' +
                'Role may not contain a comma separator');
        }
        const invalidRole = rolesArr.find(r => !this._isValidRoleARN(r));
        if (invalidRole !== undefined) {
            return errors.InvalidArgument.customizeDescription(

@ -268,22 +275,6 @@ class ReplicationConfiguration {
        return undefined;
    }

    /**
     * Check that the `StorageClass` is a valid class
     * @param {string} storageClass - The storage class to validate
     * @return {boolean} `true` if valid, otherwise `false`
     */
    _isValidStorageClass(storageClass) {
        if (!this._config) {
            return validStorageClasses.includes(storageClass);
        }

        const replicationEndpoints = this._config.replicationEndpoints
            .map(endpoint => endpoint.site);
        return replicationEndpoints.includes(storageClass) ||
            validStorageClasses.includes(storageClass);
    }

    /**
     * Check that the `StorageClass` property is valid
     * @param {object} destination - The destination object from this._parsedXML

@ -292,9 +283,28 @@ class ReplicationConfiguration {
    _parseStorageClass(destination) {
        const storageClass = destination.StorageClass &&
            destination.StorageClass[0];
        if (!this._isValidStorageClass(storageClass)) {
        if (!this._config) {
            return validStorageClasses.includes(storageClass);
        }
        const replicationEndpoints = this._config.replicationEndpoints
            .map(endpoint => endpoint.site);
        const locationConstraints =
            Object.keys(this._config.locationConstraints);
        if (locationConstraints.includes(storageClass)) {
            if (this._prevStorageClass !== null &&
                this._prevStorageClass !== storageClass) {
                return errors.InvalidRequest.customizeDescription(
                    'The storage class must be same for all rules when ' +
                    'replicating objects to an external location');
            }
            this._isExternalLocation = true;
        }
        if (!replicationEndpoints.includes(storageClass) &&
            !locationConstraints.includes(storageClass) &&
            !validStorageClasses.includes(storageClass)) {
            return errors.MalformedXML;
        }
        this._prevStorageClass = storageClass;
        return undefined;
    }

@ -359,11 +369,11 @@ class ReplicationConfiguration {
     * @return {undefined}
     */
    parseConfiguration() {
        const err = this._parseRole() || this._parseRules();
        const err = this._parseRules();
        if (err) {
            return err;
        }
        return undefined;
        return this._parseRole();
    }

    /**
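The new `Role` rule can be summarized as a standalone check — a sketch of the logic above, not part of the class's API: internal destinations require exactly two comma-separated IAM role ARNs, while external locations must carry a single role with no comma:

    // Sketch of the Role-format rule introduced above (illustration only).
    function roleCountIsValid(roleString, isExternalLocation) {
        const roles = roleString.split(',');
        return isExternalLocation ? roles.length === 1 : roles.length === 2;
    }

    roleCountIsValid('arn:aws:iam::123:role/src,arn:aws:iam::123:role/dest',
        false); // true: two roles for internal replication
    roleCountIsValid('arn:aws:iam::123:role/src', true); // true: one role, external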
@ -34,8 +34,6 @@ class RoundRobin {
            throw new Error(
                'at least one host must be provided for round robin');
        }
        this.hostsList = hostsList.map(item => this._validateHostObj(item));

        if (options && options.logger) {
            this.logger = options.logger;
        }

@ -44,6 +42,11 @@ class RoundRobin {
        } else {
            this.stickyCount = DEFAULT_STICKY_COUNT;
        }
        if (options && options.defaultPort) {
            this.defaultPort = Number.parseInt(options.defaultPort, 10);
        }

        this.hostsList = hostsList.map(item => this._validateHostObj(item));

        // TODO: add blacklisting capability

@ -90,7 +93,8 @@ class RoundRobin {
                port: parsedPort,
            };
        }
        return { host: hostItemObj.host };
        return { host: hostItemObj.host,
            port: this.defaultPort };
    }

    /**
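With the reordered constructor, `defaultPort` is parsed before `hostsList` is validated, so hosts given without an explicit port now resolve to it. A sketch, with an assumed require path:

    const RoundRobin = require('./lib/network/RoundRobin'); // path assumed

    const rr = new RoundRobin(
        [{ host: 'a.example' }, { host: 'b.example', port: 8100 }],
        { defaultPort: 8000 });
    // 'a.example' now validates to { host: 'a.example', port: 8000 }
    // instead of an entry with no port at all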
@ -176,7 +176,7 @@ class RESTServer extends httpServer {
            throw errors.MissingContentLength;
        }
        size = Number.parseInt(contentLength, 10);
        if (isNaN(size)) {
        if (Number.isNaN(size)) {
            throw errors.InvalidInput.customizeDescription(
                'bad Content-Length');
        }
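The switch from the global `isNaN` to `Number.isNaN` is deliberate: the global coerces its argument before testing, while `Number.isNaN` only matches the actual `NaN` value — which is exactly what a failed `parseInt` produces. A quick illustration:

    isNaN('foo');                             // true  — 'foo' is coerced to NaN first
    Number.isNaN('foo');                      // false — not the NaN value itself
    Number.isNaN(Number.parseInt('foo', 10)); // true  — parseInt failed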
@ -596,7 +596,7 @@ function objectStreamToJSON(rstream, wstream, cb) {
    streamRPCJSONObj = function _streamRPCJSONObj(obj, wstream, cb) {
        const cbOnce = jsutil.once(cb);

        if (typeof(obj) === 'object') {
        if (typeof obj === 'object') {
            if (obj && obj.pipe !== undefined) {
                // stream object streams as JSON arrays
                return objectStreamToJSON(obj, wstream, cbOnce);

@ -734,7 +734,7 @@ function RESTServer(params) {
     * @return {undefined}
     */
    httpServer.registerServices = function registerServices(...serviceList) {
        this.serviceList.push.apply(this.serviceList, serviceList);
        this.serviceList.push(...serviceList);
    };

    return httpServer;
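`push(...list)` is the idiomatic replacement for `push.apply` here; both append the array's elements rather than the array itself:

    const serviceList = [];
    serviceList.push.apply(serviceList, ['a', 'b']); // old form
    serviceList.push(...['c', 'd']);                 // spread form, same effect
    // serviceList is now ['a', 'b', 'c', 'd']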
@ -134,8 +134,7 @@ class SIOInputStream extends stream.Readable {
        if (this._destroyed) {
            return;
        }
        this._readState.pushBuffer.push.apply(this._readState.pushBuffer,
            data);
        this._readState.pushBuffer.push(...data);
        if (this._readState.readable) {
            this._pushData();
        }

@ -260,9 +259,9 @@ class SIOStreamSocket {
            return arg;
        }
        const log = this.logger;
        const isReadStream = (typeof(arg.pipe) === 'function'
        const isReadStream = (typeof arg.pipe === 'function'
            && typeof (arg.read) === 'function');
        let isWriteStream = (typeof(arg.write) === 'function');
        let isWriteStream = (typeof arg.write === 'function');

        if (isReadStream || isWriteStream) {
            if (isReadStream && isWriteStream) {

@ -303,7 +302,7 @@ class SIOStreamSocket {
            }
            return encodedStream;
        }
        if (typeof(arg) === 'object') {
        if (typeof arg === 'object') {
            let encodedObj;
            if (Array.isArray(arg)) {
                encodedObj = [];

@ -377,7 +376,7 @@ class SIOStreamSocket {
            });
            return stream;
        }
        if (typeof(arg) === 'object') {
        if (typeof arg === 'object') {
            let decodedObj;
            if (Array.isArray(arg)) {
                decodedObj = [];
@ -91,6 +91,10 @@ const _actionMapSSO = {
    SsoAuthorize: 'sso:Authorize',
};

const _actionMapSTS = {
    assumeRole: 'sts:AssumeRole',
};

function _findAction(service, method) {
    if (service === 's3') {
        return _actionMap[method];

@ -108,6 +112,9 @@ function _findAction(service, method) {
        // currently only method is ListMetrics
        return `utapi:${method}`;
    }
    if (service === 'sts') {
        return _actionMapSTS[method];
    }
    return undefined;
}

@ -123,13 +130,17 @@ function _buildArn(service, generalResource, specificResource, requesterInfo) {
        }
        return 'arn:aws:s3:::';
    }
    if (service === 'iam') {
    if (service === 'iam' || service === 'sts') {
        // arn:aws:iam::<account-id>:<resource-type><resource>
        let accountId = requesterInfo.accountid;
        if (service === 'sts') {
            accountId = requesterInfo.targetAccountId;
        }
        if (specificResource) {
            return `arn:aws:iam::${requesterInfo.accountid}:` +
            return `arn:aws:iam::${accountId}:` +
                `${generalResource}${specificResource}`;
        }
        return `arn:aws:iam::${requesterInfo.accountid}:${generalResource}`;
        return `arn:aws:iam::${accountId}:${generalResource}`;
    }
    if (service === 'ring') {
        // arn:aws:iam::<account-id>:<resource-type><resource>

@ -177,6 +188,7 @@ function _buildArn(service, generalResource, specificResource, requesterInfo) {
 * @param {string} authType - type of authentication used
 * @param {number} signatureAge - age of signature in milliseconds
 * @param {string} securityToken - auth security token (temporary credentials)
 * @param {string} policyArn - policy arn
 * @return {RequestContext} a RequestContext instance
 */
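Under the new branch, an STS request builds the IAM-style ARN from the target account rather than the requester's own. A sketch with hypothetical input values (field names follow the code above):

    // Hypothetical inputs mirroring the 'sts' branch above.
    const requesterInfo = {
        accountid: '111111111111',        // the caller's account
        targetAccountId: '222222222222',  // the account being assumed into
    };
    // With service === 'sts', generalResource === 'role/' and
    // specificResource === 'demo', the branch yields:
    //   'arn:aws:iam::222222222222:role/demo'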
@ -184,7 +196,8 @@ class RequestContext {
    constructor(headers, query, generalResource, specificResource,
        requesterIp, sslEnabled, apiMethod,
        awsService, locationConstraint, requesterInfo,
        signatureVersion, authType, signatureAge, securityToken) {
        signatureVersion, authType, signatureAge, securityToken, policyArn,
        action) {
        this._headers = headers;
        this._query = query;
        this._requesterIp = requesterIp;

@ -210,7 +223,8 @@ class RequestContext {
        this._authType = authType;
        this._signatureAge = signatureAge;
        this._securityToken = securityToken;
        this._policyArn = policyArn;
        this._action = action;
        return this;
    }

@ -237,6 +251,8 @@ class RequestContext {
            locationConstraint: this._locationConstraint,
            tokenIssueTime: this._tokenIssueTime,
            securityToken: this._securityToken,
            policyArn: this._policyArn,
            action: this._action,
        };
        return JSON.stringify(requestInfo);
    }

@ -257,7 +273,8 @@ class RequestContext {
            obj.specificResource, obj.requesterIp, obj.sslEnabled,
            obj.apiMethod, obj.awsService, obj.locationConstraint,
            obj.requesterInfo, obj.signatureVersion,
            obj.authType, obj.signatureAge, obj.securityToken);
            obj.authType, obj.signatureAge, obj.securityToken, obj.policyArn,
            obj.action);
    }

    /**

@ -265,6 +282,9 @@ class RequestContext {
     * @return {string} action
     */
    getAction() {
        if (this._action) {
            return this._action;
        }
        if (this._foundAction) {
            return this._foundAction;
        }

@ -355,6 +375,26 @@ class RequestContext {
        return parseIp(this._requesterIp);
    }

    getRequesterAccountId() {
        return this._requesterInfo.accountid;
    }

    getRequesterEndArn() {
        return this._requesterInfo.arn;
    }

    getRequesterExternalId() {
        return this._requesterInfo.externalId;
    }

    getRequesterPrincipalArn() {
        return this._requesterInfo.parentArn || this._requesterInfo.arn;
    }

    getRequesterType() {
        return this._requesterInfo.principalType;
    }

    /**
     * Set sslEnabled
     * @param {boolean} sslEnabled - true if https used

@ -548,6 +588,26 @@ class RequestContext {
        this._securityToken = token;
        return this;
    }

    /**
     * Get the policy arn
     *
     * @return {string} policyArn - Policy arn
     */
    getPolicyArn() {
        return this._policyArn;
    }

    /**
     * Set the policy arn
     *
     * @param {string} policyArn - Policy arn
     * @return {RequestContext} itself
     */
    setPolicyArn(policyArn) {
        this._policyArn = policyArn;
        return this;
    }
}

module.exports = RequestContext;
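The new accessors are a plain get/set pair, and `setPolicyArn` returns `this` for chaining like the other setters. A minimal sketch, assuming `rc` is an existing RequestContext instance built elsewhere:

    // `rc` is a hypothetical RequestContext instance
    rc.setPolicyArn('arn:aws:iam::123456789012:policy/Example'); // returns rc
    rc.getPolicyArn(); // 'arn:aws:iam::123456789012:policy/Example'
    // serialize()/deSerialize now carry policyArn and action through a round trip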
@ -98,7 +98,7 @@ function isActionApplicable(requestAction, statementAction, log) {
 * @param {Object} log - logger
 * @return {boolean} true if meet conditions, false if not
 */
function meetConditions(requestContext, statementCondition, log) {
evaluators.meetConditions = (requestContext, statementCondition, log) => {
    // The Condition portion of a policy is an object with different
    // operators as keys
    const operators = Object.keys(statementCondition);

@ -119,8 +119,7 @@ function meetConditions(requestContext, statementCondition, log) {
        const conditionsWithSameOperator = statementCondition[operator];
        const conditionKeys = Object.keys(conditionsWithSameOperator);
        const conditionKeysLength = conditionKeys.length;
        for (let j = 0; j < conditionKeysLength;
            j ++) {
        for (let j = 0; j < conditionKeysLength; j++) {
            const key = conditionKeys[j];
            let value = conditionsWithSameOperator[key];
            if (!Array.isArray(value)) {

@ -171,7 +170,7 @@ function meetConditions(requestContext, statementCondition, log) {
        }
    }
    return true;
}
};

/**
 * Evaluate whether a request is permitted under a policy.

@ -222,7 +221,8 @@ evaluators.evaluatePolicy = (requestContext, policy, log) => {
            continue;
        }
        // If do not meet conditions move on to next statement
        if (currentStatement.Condition && !meetConditions(requestContext,
        if (currentStatement.Condition &&
            !evaluators.meetConditions(requestContext,
            currentStatement.Condition, log)) {
            continue;
        }
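Attaching `meetConditions` to the exported `evaluators` object, instead of keeping it module-private, is what lets the new `principal.js` below require it, and makes it stubbable in tests. A sketch of the two equivalent imports, with placeholder arguments:

    const evaluators = require('./evaluator');          // whole module
    const { meetConditions } = require('./evaluator');  // destructured, as principal.js does
    // meetConditions(requestContext, statementCondition, log) -> boolean,
    // where the arguments are placeholders for a RequestContext,
    // a policy Condition block, and a request logger.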
@ -0,0 +1,176 @@
const { meetConditions } = require('./evaluator');

/**
 * Class with methods to manage the policy 'principal' validation
 */
class Principal {
    /**
     * Function to evaluate conditions if needed
     *
     * @param {object} params - Evaluation parameters
     * @param {object} statement - Statement policy field
     * @return {boolean} True if meet conditions
     */
    static _evaluateCondition(params, statement) {
        if (statement.Condition) {
            return meetConditions(params.rc, statement.Condition, params.log);
        }
        return true;
    }

    /**
     * Checks principal field against valid principals array
     *
     * @param {object} params - Evaluation parameters
     * @param {object} statement - Statement policy field
     * @param {object} valids - Valid principal fields
     * @return {string} result of principal evaluation, either 'Neutral',
     * 'Allow' or 'Deny'
     */
    static _evaluatePrincipalField(params, statement, valids) {
        const reverse = !!statement.NotPrincipal;
        const principal = statement.Principal || statement.NotPrincipal;
        if (typeof principal === 'string' && principal === '*') {
            if (reverse) {
                // In case of anonymous NotPrincipal, this will neutral everyone
                return 'Neutral';
            }
            if (!Principal._evaluateCondition(params, statement)) {
                return 'Neutral';
            }
            return statement.Effect;
        } else if (typeof principal === 'string') {
            return 'Deny';
        }
        let ref = [];
        let toCheck = [];
        if (valids.Federated && principal.Federated) {
            ref = valids.Federated;
            toCheck = principal.Federated;
        } else if (valids.AWS && principal.AWS) {
            ref = valids.AWS;
            toCheck = principal.AWS;
        } else if (valids.Service && principal.Service) {
            ref = valids.Service;
            toCheck = principal.Service;
        } else {
            if (reverse) {
                return statement.Effect;
            }
            return 'Neutral';
        }
        toCheck = Array.isArray(toCheck) ? toCheck : [toCheck];
        ref = Array.isArray(ref) ? ref : [ref];
        if (toCheck.indexOf('*') !== -1) {
            if (reverse) {
                return 'Neutral';
            }
            if (!Principal._evaluateCondition(params, statement)) {
                return 'Neutral';
            }
            return statement.Effect;
        }
        const len = ref.length;
        for (let i = 0; i < len; ++i) {
            if (toCheck.indexOf(ref[i]) !== -1) {
                if (reverse) {
                    return 'Neutral';
                }
                if (!Principal._evaluateCondition(params, statement)) {
                    return 'Neutral';
                }
                return statement.Effect;
            }
        }
        if (reverse) {
            return statement.Effect;
        }
        return 'Neutral';
    }

    /**
     * Function to evaluate principal of statements against a valid principal
     * array
     *
     * @param {object} params - Evaluation parameters
     * @param {object} valids - Valid principal fields
     * @return {string} result of principal evaluation, either 'Allow' or 'Deny'
     */
    static _evaluatePrincipal(params, valids) {
        const doc = params.trustedPolicy;
        let statements = doc.Statement;
        if (!Array.isArray(statements)) {
            statements = [statements];
        }
        const len = statements.length;
        let authorized = 'Deny';
        for (let i = 0; i < len; ++i) {
            const statement = statements[i];
            const result = Principal._evaluatePrincipalField(params,
                statement, valids);
            if (result === 'Deny') {
                return 'Deny';
            } else if (result === 'Allow') {
                authorized = 'Allow';
            }
        }
        return authorized;
    }

    /**
     * Function to evaluate principal for a policy
     *
     * @param {object} params - Evaluation parameters
     * @return {object} {
     *  result: 'Allow' or 'Deny',
     *  checkAction: true or false,
     * }
     */
    static evaluatePrincipal(params) {
        let valids = null;
        let checkAction = false;
        const account = params.rc.getRequesterAccountId();
        const targetAccount = params.targetAccountId;
        const accountArn = `arn:aws:iam::${account}:root`;
        const requesterArn = params.rc.getRequesterPrincipalArn();
        const requesterEndArn = params.rc.getRequesterEndArn();
        const requesterType = params.rc.getRequesterType();
        if (account !== targetAccount) {
            valids = {
                AWS: [
                    account,
                    accountArn,
                ],
            };
            checkAction = true;
        } else {
            if (requesterType === 'User' || requesterType === 'AssumedRole' ||
                requesterType === 'Federated') {
                valids = {
                    AWS: [
                        account,
                        accountArn,
                    ],
                };
                if (requesterType === 'User' ||
                    requesterType === 'AssumedRole') {
                    valids.AWS.push(requesterArn);
                    if (requesterEndArn !== requesterArn) {
                        valids.AWS.push(requesterEndArn);
                    }
                } else {
                    valids.Federated = [requesterArn];
                }
            } else if (requesterType === 'Service') {
                valids = { Service: requesterArn };
            }
        }
        const result = Principal._evaluatePrincipal(params, valids);
        return {
            result,
            checkAction,
        };
    }
}

module.exports = Principal;
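A sketch of driving the evaluator, with a hypothetical `params` shape inferred from the reads in `evaluatePrincipal()` (`rc` is a RequestContext, `trustedPolicy` a role's trust document; the require path is an assumption):

    const Principal = require('./lib/policyEvaluator/principal'); // path assumed

    // Hypothetical params; field names follow the reads above.
    const params = {
        rc: requestContext,            // a RequestContext instance
        targetAccountId: '123456789012',
        trustedPolicy: {
            Statement: {
                Effect: 'Allow',
                Principal: { AWS: 'arn:aws:iam::123456789012:root' },
            },
        },
        log,                           // a request logger
    };
    const { result, checkAction } = Principal.evaluatePrincipal(params);
    // result is 'Allow' or 'Deny'; checkAction is true for cross-account requests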
@ -14,8 +14,7 @@ const handleWildcardInResource =
 */
function checkArnMatch(policyArn, requestRelativeId, requestArnArr,
    caseSensitive) {
    let regExofArn = handleWildcardInResource(policyArn);
    regExofArn = caseSensitive ? regExofArn : regExofArn.toLowerCase();
    const regExofArn = handleWildcardInResource(policyArn);
    // The relativeId is the last part of the ARN (for instance, a bucket and
    // object name in S3)
    // Join on ":" in case there were ":" in the relativeID at the end

@ -144,6 +144,8 @@ conditions.findConditionKey = (key, requestContext) => {
    // header
    map.set('s3:ObjLocationConstraint',
        headers['x-amz-meta-scal-location-constraint']);
    map.set('sts:ExternalId', requestContext.getRequesterExternalId());
    map.set('iam:PolicyArn', requestContext.getPolicyArn());
    return map.get(key);
};
@ -0,0 +1,83 @@
const EventEmitter = require('events');

/**
 * Class to collect results of streaming subparts.
 * Emits "done" event when streaming is complete and Azure has returned
 * results for putting each of the subparts
 * Emits "error" event if Azure returns an error for putting a subpart and
 * streaming is in-progress
 * @class ResultsCollector
 */
class ResultsCollector extends EventEmitter {
    /**
     * @constructor
     */
    constructor() {
        super();
        this._results = [];
        this._queue = 0;
        this._streamingFinished = false;
    }

    /**
     * ResultsCollector.pushResult - register result of putting one subpart
     * and emit "done" or "error" events if appropriate
     * @param {(Error|undefined)} err - error returned from Azure after
     * putting a subpart
     * @param {number} subPartIndex - the index of the subpart
     * @emits ResultCollector#done
     * @emits ResultCollector#error
     * @return {undefined}
     */
    pushResult(err, subPartIndex) {
        this._results.push({
            error: err,
            subPartIndex,
        });
        this._queue--;
        if (this._resultsComplete()) {
            this.emit('done', err, this._results);
        } else if (err) {
            this.emit('error', err, subPartIndex);
        }
    }

    /**
     * ResultsCollector.pushOp - register operation to put another subpart
     * @return {undefined};
     */
    pushOp() {
        this._queue++;
    }

    /**
     * ResultsCollector.enableComplete - register streaming has finished,
     * allowing ResultCollector#done event to be emitted when last result
     * has been returned
     * @return {undefined};
     */
    enableComplete() {
        this._streamingFinished = true;
    }

    _resultsComplete() {
        return (this._queue === 0 && this._streamingFinished);
    }
}

/**
 * "done" event
 * @event ResultCollector#done
 * @type {(Error|undefined)} err - error returned by Azure putting last subpart
 * @type {object[]} results - result for putting each of the subparts
 * @property {Error} [results[].error] - error returned by Azure putting subpart
 * @property {number} results[].subPartIndex - index of the subpart
 */
/**
 * "error" event
 * @event ResultCollector#error
 * @type {(Error|undefined)} error - error returned by Azure last subpart
 * @type {number} subPartIndex - index of the subpart
 */

module.exports = ResultsCollector;
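A minimal sketch of the intended call pattern: one `pushOp()` per subpart started, one `pushResult()` per Azure callback, and `enableComplete()` once the source stream ends:

    const ResultsCollector =
        require('./lib/s3middleware/azureHelpers/ResultsCollector'); // path assumed

    const collector = new ResultsCollector();
    collector.on('done', (err, results) => { /* all subparts returned */ });
    collector.on('error', (err, subPartIndex) => { /* mid-stream failure */ });

    collector.pushOp();            // subpart 0 started
    collector.pushOp();            // subpart 1 started
    collector.enableComplete();    // no more subparts will be queued
    collector.pushResult(null, 0); // subpart 0 succeeded
    collector.pushResult(null, 1); // queue drained -> 'done' fires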
@ -0,0 +1,126 @@
const stream = require('stream');

/**
 * Interface for streaming subparts.
 * @class SubStreamInterface
 */
class SubStreamInterface {
    /**
     * @constructor
     * @param {stream.Readable} sourceStream - stream to read for data
     */
    constructor(sourceStream) {
        this._sourceStream = sourceStream;
        this._totalLengthCounter = 0;
        this._lengthCounter = 0;
        this._subPartIndex = 0;
        this._currentStream = new stream.PassThrough();
    }

    /**
     * SubStreamInterface.pauseStreaming - pause data flow
     * @return {undefined}
     */
    pauseStreaming() {
        this._sourceStream.pause();
    }

    /**
     * SubStreamInterface.resumeStreaming - resume data flow
     * @return {undefined}
     */
    resumeStreaming() {
        this._sourceStream.resume();
    }

    /**
     * SubStreamInterface.endStreaming - signal end of data for last stream,
     * to be called when source stream has ended
     * @return {undefined}
     */
    endStreaming() {
        this._totalLengthCounter += this._lengthCounter;
        this._currentStream.end();
    }

    /**
     * SubStreamInterface.stopStreaming - destroy streams,
     * to be called when streaming must be stopped externally
     * @param {stream.Readable} [piper] - a stream that is piping data into
     * source stream
     * @return {undefined}
     */
    stopStreaming(piper) {
        if (piper) {
            piper.unpipe();
            piper.destroy();
        }
        this._sourceStream.destroy();
        this._currentStream.destroy();
    }

    /**
     * SubStreamInterface.getLengthCounter - return length of bytes streamed
     * for current subpart
     * @return {number} - this._lengthCounter
     */
    getLengthCounter() {
        return this._lengthCounter;
    }

    /**
     * SubStreamInterface.getTotalBytesStreamed - return total bytes streamed
     * @return {number} - this._totalLengthCounter
     */
    getTotalBytesStreamed() {
        return this._totalLengthCounter;
    }

    /**
     * SubStreamInterface.getCurrentStream - return subpart stream currently
     * being written to from source stream
     * @return {number} - this._currentStream
     */
    getCurrentStream() {
        return this._currentStream;
    }

    /**
     * SubStreamInterface.transitionToNextStream - signal end of data for
     * current stream, generate a new stream and start streaming to new stream
     * @return {object} - return object containing new current stream and
     * subpart index of current subpart
     */
    transitionToNextStream() {
        this.pauseStreaming();
        this._currentStream.end();
        this._totalLengthCounter += this._lengthCounter;
        this._lengthCounter = 0;
        this._subPartIndex++;
        this._currentStream = new stream.PassThrough();
        this.resumeStreaming();
        return {
            nextStream: this._currentStream,
            subPartIndex: this._subPartIndex,
        };
    }

    /**
     * SubStreamInterface.write - write to the current stream
     * @param {Buffer} chunk - a chunk of data
     * @return {undefined}
     */
    write(chunk) {
        const ready = this._currentStream.write(chunk);

        if (!ready) {
            this.pauseStreaming();
            this._currentStream.once('drain', () => {
                this.resumeStreaming();
            });
        }
        this._lengthCounter += chunk.length;
    }
}

module.exports = SubStreamInterface;
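A sketch of the lifecycle: write into the current subpart stream, roll over at a size boundary, and end when the source does (the boundary handling mirrors the 'data' handler in mpuUtils below). The upload sinks here are hypothetical writables:

    const stream = require('stream');
    const SubStreamInterface =
        require('./lib/s3middleware/azureHelpers/SubStreamInterface'); // path assumed

    const source = new stream.PassThrough();
    const iface = new SubStreamInterface(source);

    iface.getCurrentStream().pipe(firstUploadSink);  // hypothetical sink, subpart 0
    iface.write(Buffer.alloc(100));                  // counted toward subpart 0

    const { nextStream, subPartIndex } = iface.transitionToNextStream();
    nextStream.pipe(secondUploadSink);               // hypothetical sink, subpart 1
    iface.endStreaming();                            // close the last subpart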
@ -0,0 +1,224 @@
const crypto = require('crypto');
const stream = require('stream');

const ResultsCollector = require('./ResultsCollector');
const SubStreamInterface = require('./SubStreamInterface');
const objectUtils = require('../objectUtils');
const MD5Sum = require('../MD5Sum');
const errors = require('../../errors');

const azureMpuUtils = {};

azureMpuUtils.splitter = '|';
azureMpuUtils.overviewMpuKey = 'azure_mpu';
azureMpuUtils.maxSubPartSize = 104857600;
azureMpuUtils.zeroByteETag = crypto.createHash('md5').update('').digest('hex');


azureMpuUtils.padString = (str, category) => {
    const _padFn = {
        left: (str, padString) =>
            `${padString}${str}`.substr(-padString.length),
        right: (str, padString) =>
            `${str}${padString}`.substr(0, padString.length),
    };
    // It's a little more performant if we add pre-generated strings for each
    // type of padding we want to apply, instead of using string.repeat() to
    // create the padding.
    const padSpec = {
        partNumber: {
            padString: '00000',
            direction: 'left',
        },
        subPart: {
            padString: '00',
            direction: 'left',
        },
        part: {
            padString:
            '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%',
            direction: 'right',
        },
    };
    const { direction, padString } = padSpec[category];
    return _padFn[direction](str, padString);
};

// NOTE: If we want to extract the object name from these keys, we will need
// to use a similar method to _getKeyAndUploadIdFromMpuKey since the object
// name may have instances of the splitter used to delimit arguments
azureMpuUtils.getMpuSummaryKey = (objectName, uploadId) =>
    `${objectName}${azureMpuUtils.splitter}${uploadId}`;

azureMpuUtils.getBlockId = (uploadId, partNumber, subPartIndex) => {
    const paddedPartNumber = azureMpuUtils.padString(partNumber, 'partNumber');
    const paddedSubPart = azureMpuUtils.padString(subPartIndex, 'subPart');
    const splitter = azureMpuUtils.splitter;
    const blockId = `${uploadId}${splitter}partNumber${paddedPartNumber}` +
        `${splitter}subPart${paddedSubPart}${splitter}`;
    return azureMpuUtils.padString(blockId, 'part');
};

azureMpuUtils.getSummaryPartId = (partNumber, eTag, size) => {
    const paddedPartNumber = azureMpuUtils.padString(partNumber, 'partNumber');
    const timestamp = Date.now();
    const splitter = azureMpuUtils.splitter;
    const summaryKey = `${paddedPartNumber}${splitter}${timestamp}` +
        `${splitter}${eTag}${splitter}${size}${splitter}`;
    return azureMpuUtils.padString(summaryKey, 'part');
};

azureMpuUtils.getSubPartInfo = dataContentLength => {
    const numberFullSubParts =
        Math.floor(dataContentLength / azureMpuUtils.maxSubPartSize);
    const remainder = dataContentLength % azureMpuUtils.maxSubPartSize;
    const numberSubParts = remainder ?
        numberFullSubParts + 1 : numberFullSubParts;
    const lastPartSize = remainder || azureMpuUtils.maxSubPartSize;
    return {
        lastPartIndex: numberSubParts - 1,
        lastPartSize,
    };
};

azureMpuUtils.getSubPartSize = (subPartInfo, subPartIndex) => {
    const { lastPartIndex, lastPartSize } = subPartInfo;
    return subPartIndex === lastPartIndex ?
        lastPartSize : azureMpuUtils.maxSubPartSize;
};

azureMpuUtils.getSubPartIds = (part, uploadId) =>
    [...Array(part.numberSubParts).keys()].map(subPartIndex =>
        azureMpuUtils.getBlockId(uploadId, part.partNumber, subPartIndex));

azureMpuUtils.putSinglePart = (errorWrapperFn, request, params, dataStoreName,
    log, cb) => {
    const { bucketName, partNumber, size, objectKey, contentMD5, uploadId }
        = params;
    const totalSubParts = 1;
    const blockId = azureMpuUtils.getBlockId(uploadId, partNumber, 0);
    const passThrough = new stream.PassThrough();
    const options = {};
    if (contentMD5) {
        options.useTransactionalMD5 = true;
        options.transactionalContentMD5 = contentMD5;
    }
    request.pipe(passThrough);
    return errorWrapperFn('uploadPart', 'createBlockFromStream',
        [blockId, bucketName, objectKey, passThrough, size, options,
        (err, result) => {
            if (err) {
                log.error('Error from Azure data backend uploadPart',
                    { error: err.message, dataStoreName });
                if (err.code === 'ContainerNotFound') {
                    return cb(errors.NoSuchBucket);
                }
                if (err.code === 'InvalidMd5') {
                    return cb(errors.InvalidDigest);
                }
                if (err.code === 'Md5Mismatch') {
                    return cb(errors.BadDigest);
                }
                return cb(errors.InternalError.customizeDescription(
                    `Error returned from Azure: ${err.message}`)
                );
            }
            const eTag = objectUtils.getHexMD5(result.headers['content-md5']);
            return cb(null, eTag, totalSubParts, size);
        }], log, cb);
};

azureMpuUtils.putNextSubPart = (errorWrapperFn, partParams, subPartInfo,
    subPartStream, subPartIndex, resultsCollector, log, cb) => {
    const { uploadId, partNumber, bucketName, objectKey } = partParams;
    const subPartSize = azureMpuUtils.getSubPartSize(
        subPartInfo, subPartIndex);
    const subPartId = azureMpuUtils.getBlockId(uploadId, partNumber,
        subPartIndex);
    resultsCollector.pushOp();
    errorWrapperFn('uploadPart', 'createBlockFromStream',
        [subPartId, bucketName, objectKey, subPartStream, subPartSize,
        {}, err => resultsCollector.pushResult(err, subPartIndex)], log, cb);
};

azureMpuUtils.putSubParts = (errorWrapperFn, request, params,
    dataStoreName, log, cb) => {
    const subPartInfo = azureMpuUtils.getSubPartInfo(params.size);
    const resultsCollector = new ResultsCollector();
    const hashedStream = new MD5Sum();
    const streamInterface = new SubStreamInterface(hashedStream);
    log.trace('data length is greater than max subpart size;' +
        'putting multiple parts');

    resultsCollector.on('error', (err, subPartIndex) => {
        streamInterface.stopStreaming(request);
        log.error(`Error putting subpart to Azure: ${subPartIndex}`,
            { error: err.message, dataStoreName });
        if (err.code === 'ContainerNotFound') {
            return cb(errors.NoSuchBucket);
        }
        return cb(errors.InternalError.customizeDescription(
            `Error returned from Azure: ${err}`));
    });

    resultsCollector.on('done', (err, results) => {
        if (err) {
            log.error('Error putting last subpart to Azure',
                { error: err.message, dataStoreName });
            if (err.code === 'ContainerNotFound') {
                return cb(errors.NoSuchBucket);
            }
            return cb(errors.InternalError.customizeDescription(
                `Error returned from Azure: ${err}`));
        }
        const numberSubParts = results.length;
        const totalLength = streamInterface.getTotalBytesStreamed();
        log.trace('successfully put subparts to Azure',
            { numberSubParts, totalLength });
        hashedStream.on('hashed', () => cb(null, hashedStream.completedHash,
            numberSubParts, totalLength));

        // in case the hashed event was already emitted before the
        // event handler was registered:
        if (hashedStream.completedHash) {
            hashedStream.removeAllListeners('hashed');
            return cb(null, hashedStream.completedHash, numberSubParts,
                totalLength);
        }
        return undefined;
    });

    const currentStream = streamInterface.getCurrentStream();
    // start first put to Azure before we start streaming the data
    azureMpuUtils.putNextSubPart(errorWrapperFn, params, subPartInfo,
        currentStream, 0, resultsCollector, log, cb);

    request.pipe(hashedStream);
    hashedStream.on('end', () => {
        resultsCollector.enableComplete();
        streamInterface.endStreaming();
    });
    hashedStream.on('data', data => {
        const currentLength = streamInterface.getLengthCounter();
        if (currentLength + data.length > azureMpuUtils.maxSubPartSize) {
            const bytesToMaxSize = azureMpuUtils.maxSubPartSize - currentLength;
            const firstChunk = bytesToMaxSize === 0 ? data :
                data.slice(bytesToMaxSize);
            if (bytesToMaxSize !== 0) {
                // if we have not streamed full subpart, write enough of the
                // data chunk to stream the correct length
                streamInterface.write(data.slice(0, bytesToMaxSize));
            }
            const { nextStream, subPartIndex } =
                streamInterface.transitionToNextStream();
            azureMpuUtils.putNextSubPart(errorWrapperFn, params, subPartInfo,
                nextStream, subPartIndex, resultsCollector, log, cb);
            streamInterface.write(firstChunk);
        } else {
            streamInterface.write(data);
        }
    });
};


module.exports = azureMpuUtils;
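Worked numbers for `getSubPartInfo`, which drives the 100 MB (104857600-byte) subpart split used above:

    // A 250 MB part => two full 100 MB subparts plus a 50 MB remainder:
    const info = azureMpuUtils.getSubPartInfo(262144000);
    // info.lastPartIndex === 2 (subparts 0, 1 and 2)
    // info.lastPartSize === 52428800
    azureMpuUtils.getSubPartSize(info, 1); // 104857600 (a full subpart)
    azureMpuUtils.getSubPartSize(info, 2); // 52428800 (the remainder)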
@ -0,0 +1,107 @@
const querystring = require('querystring');
const escapeForXml = require('./escapeForXml');

const convertMethods = {};

convertMethods.completeMultipartUpload = xmlParams => {
    const escapedBucketName = escapeForXml(xmlParams.bucketName);
    return '<?xml version="1.0" encoding="UTF-8"?>' +
        '<CompleteMultipartUploadResult ' +
        'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
        `<Location>http://${escapedBucketName}.` +
        `${escapeForXml(xmlParams.hostname)}/` +
        `${escapeForXml(xmlParams.objectKey)}</Location>` +
        `<Bucket>${escapedBucketName}</Bucket>` +
        `<Key>${escapeForXml(xmlParams.objectKey)}</Key>` +
        `<ETag>${escapeForXml(xmlParams.eTag)}</ETag>` +
        '</CompleteMultipartUploadResult>';
};

convertMethods.initiateMultipartUpload = xmlParams =>
    '<?xml version="1.0" encoding="UTF-8"?>' +
    '<InitiateMultipartUploadResult ' +
    'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
    `<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>` +
    `<Key>${escapeForXml(xmlParams.objectKey)}</Key>` +
    `<UploadId>${escapeForXml(xmlParams.uploadId)}</UploadId>` +
    '</InitiateMultipartUploadResult>';

convertMethods.listMultipartUploads = xmlParams => {
    const xml = [];
    const l = xmlParams.list;

    xml.push('<?xml version="1.0" encoding="UTF-8"?>',
        '<ListMultipartUploadsResult ' +
        'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">',
        `<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>`
    );

    // For certain XML elements, if it is `undefined`, AWS returns either an
    // empty tag or does not include it. Hence the `optional` key in the params.
    const params = [
        { tag: 'KeyMarker', value: xmlParams.keyMarker },
        { tag: 'UploadIdMarker', value: xmlParams.uploadIdMarker },
        { tag: 'NextKeyMarker', value: l.NextKeyMarker, optional: true },
        { tag: 'NextUploadIdMarker', value: l.NextUploadIdMarker,
            optional: true },
        { tag: 'Delimiter', value: l.Delimiter, optional: true },
        { tag: 'Prefix', value: xmlParams.prefix, optional: true },
    ];

    params.forEach(param => {
        if (param.value) {
            xml.push(`<${param.tag}>${escapeForXml(param.value)}` +
                `</${param.tag}>`);
        } else if (!param.optional) {
            xml.push(`<${param.tag} />`);
        }
    });

    xml.push(`<MaxUploads>${escapeForXml(l.MaxKeys)}</MaxUploads>`,
        `<IsTruncated>${escapeForXml(l.IsTruncated)}</IsTruncated>`
    );

    l.Uploads.forEach(upload => {
        const val = upload.value;
        let key = upload.key;
        if (xmlParams.encoding === 'url') {
            key = querystring.escape(key);
        }

        xml.push('<Upload>',
            `<Key>${escapeForXml(key)}</Key>`,
            `<UploadId>${escapeForXml(val.UploadId)}</UploadId>`,
            '<Initiator>',
            `<ID>${escapeForXml(val.Initiator.ID)}</ID>`,
            `<DisplayName>${escapeForXml(val.Initiator.DisplayName)}` +
            '</DisplayName>',
            '</Initiator>',
            '<Owner>',
            `<ID>${escapeForXml(val.Owner.ID)}</ID>`,
            `<DisplayName>${escapeForXml(val.Owner.DisplayName)}` +
            '</DisplayName>',
            '</Owner>',
            `<StorageClass>${escapeForXml(val.StorageClass)}` +
            '</StorageClass>',
            `<Initiated>${escapeForXml(val.Initiated)}</Initiated>`,
            '</Upload>'
        );
    });

    l.CommonPrefixes.forEach(prefix => {
        xml.push('<CommonPrefixes>',
            `<Prefix>${escapeForXml(prefix)}</Prefix>`,
            '</CommonPrefixes>'
        );
    });

    xml.push('</ListMultipartUploadsResult>');

    return xml.join('');
};

function convertToXml(method, xmlParams) {
    return convertMethods[method](xmlParams);
}

module.exports = convertToXml;
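A sketch of calling the dispatcher for the simplest of the three methods; the upload id is a hypothetical value and the require path is an assumption:

    const convertToXml = require('./lib/s3middleware/convertToXml'); // path assumed

    const xml = convertToXml('initiateMultipartUpload', {
        bucketName: 'test-bucket',
        objectKey: 'photos/cat.jpg',
        uploadId: 'example-upload-id', // hypothetical id
    });
    // => '<?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult ...'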
@ -0,0 +1,9 @@
const objectUtils = {};

objectUtils.getHexMD5 = base64MD5 =>
    Buffer.from(base64MD5, 'base64').toString('hex');

objectUtils.getBase64MD5 = hexMD5 =>
    Buffer.from(hexMD5, 'hex').toString('base64');

module.exports = objectUtils;
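Azure reports blob MD5s base64-encoded while S3 ETags are hex, hence the two converters; they are inverses of each other:

    const objectUtils = require('./lib/s3middleware/objectUtils'); // path assumed

    const hex = objectUtils.getHexMD5('1B2M2Y8AsgTpgAmY7PhCfg==');
    // hex === 'd41d8cd98f00b204e9800998ecf8427e' (MD5 of the empty string)
    objectUtils.getBase64MD5(hex); // '1B2M2Y8AsgTpgAmY7PhCfg=='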
@ -45,7 +45,7 @@ function _checkModifiedSince(ifModifiedSinceTime, lastModified) {
    if (ifModifiedSinceTime) {
        res.present = true;
        const checkWith = (new Date(ifModifiedSinceTime)).getTime();
        if (isNaN(checkWith)) {
        if (Number.isNaN(Number(checkWith))) {
            res.error = errors.InvalidArgument;
        } else if (lastModified <= checkWith) {
            res.error = errors.NotModified;

@ -59,7 +59,7 @@ function _checkUnmodifiedSince(ifUnmodifiedSinceTime, lastModified) {
    if (ifUnmodifiedSinceTime) {
        res.present = true;
        const checkWith = (new Date(ifUnmodifiedSinceTime)).getTime();
        if (isNaN(checkWith)) {
        if (Number.isNaN(Number(checkWith))) {
            res.error = errors.InvalidArgument;
        } else if (lastModified > checkWith) {
            res.error = errors.PreconditionFailed;
@ -35,12 +35,12 @@ function checkUnsupportedRoutes(reqMethod) {

function checkBucketAndKey(bucketName, objectKey, method, reqQuery,
    blacklistedPrefixes, log) {
    // if empty name and request not a list Buckets
    if (!bucketName && !(method === 'GET' && !objectKey)) {
    // bucketName should also be undefined, but is checked below anyway
    const getServiceCall = (method === 'GET' && !objectKey);
    // if empty name and request not a list Buckets or preflight request
    if (!bucketName && !(getServiceCall || method === 'OPTIONS')) {
        log.debug('empty bucket name', { method: 'routes' });
        return (method !== 'OPTIONS') ?
            errors.MethodNotAllowed : errors.AccessForbidden
            .customizeDescription('CORSResponse: Bucket not found');
        return errors.MethodNotAllowed;
    }
    if (bucketName !== undefined && routesUtils.isValidBucketName(bucketName,
        blacklistedPrefixes.bucket) === false) {

@ -186,7 +186,7 @@ function routes(req, res, params, logger) {

    if (statsClient) {
        // report new request for stats
        statsClient.reportNewRequest();
        statsClient.reportNewRequest('s3');
    }

    try {
@ -16,8 +16,7 @@ function routeDELETE(request, response, api, log, statsClient) {
            return routesUtils.responseNoBody(err, corsHeaders, response,
                204, log);
        });
    } else {
        if (request.objectKey === undefined) {
    } else if (request.objectKey === undefined) {
        if (request.query.website !== undefined) {
            return api.callApiMethod('bucketDeleteWebsite', request,
                response, log, (err, corsHeaders) => {

@ -71,7 +70,6 @@ function routeDELETE(request, response, api, log, statsClient) {
                    204, log);
            });
        }
    }
    return undefined;
}
@ -8,9 +8,11 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
    } else if (request.bucketName === undefined
        && request.objectKey === undefined) {
        // GET service
        api.callApiMethod('serviceGet', request, response, log, (err, xml) => {
        api.callApiMethod('serviceGet', request, response, log,
            (err, xml, corsHeaders) => {
                routesUtils.statsReport500(err, statsClient);
                return routesUtils.responseXMLBody(err, xml, response, log);
                return routesUtils.responseXMLBody(err, xml, response, log,
                    corsHeaders);
            });
    } else if (request.objectKey === undefined) {
        // GET bucket ACL

@ -74,6 +76,7 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
            });
        }
    } else {
        /* eslint-disable no-lonely-if */
        if (request.query.acl !== undefined) {
            // GET object ACL
            api.callApiMethod('objectGetACL', request, response, log,

@ -113,6 +116,7 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
                    range, log);
            });
        }
        /* eslint-enable */
    }
}
@ -10,8 +10,8 @@ function routePUT(request, response, api, log, statsClient) {

    // content-length for object is handled separately below
    const contentLength = request.headers['content-length'];
    if ((contentLength && (isNaN(contentLength) || contentLength < 0)) ||
        contentLength === '') {
    if ((contentLength && (Number.isNaN(Number(contentLength))
        || contentLength < 0)) || contentLength === '') {
        log.debug('invalid content-length header');
        return routesUtils.responseNoBody(
            errors.BadRequest, null, response, null, log);
@ -310,7 +310,8 @@ function _contentLengthMatchesLocations(contentLength, dataLocations) {
        (sum, location) => (sum !== undefined && location.size ?
            sum + Number.parseInt(location.size, 10) :
            undefined), 0);
    return sumSizes === undefined || sumSizes === contentLength;
    return sumSizes === undefined ||
        sumSizes === Number.parseInt(contentLength, 10);
}

const routesUtils = {

@ -830,6 +831,7 @@ const routesUtils = {
     */
    isValidBucketName(bucketname, prefixBlacklist) {
        const ipAddressRegex = new RegExp(/^(\d+\.){3}\d+$/);
        // eslint-disable-next-line no-useless-escape
        const dnsRegex = new RegExp(/^[a-z0-9]+([\.\-]{1}[a-z0-9]+)*$/);
        // Must be at least 3 and no more than 63 characters long.
        if (bucketname.length < 3 || bucketname.length > 63) {

@ -878,7 +880,7 @@ const routesUtils = {
     */
    statsReport500(err, statsClient) {
        if (statsClient && err && err.code === 500) {
            statsClient.report500();
            statsClient.report500('s3');
        }
        return undefined;
    },
@ -91,7 +91,7 @@ class MetadataFileClient {
                return done(err);
            }
            this.logger.info('connected to record log service', { url });
            return done();
            return done(null, logProxy);
        });
        return logProxy;
    }

@ -147,7 +147,7 @@ class MetadataFileServer {
        }

        this.servers.forEach(server => {
            server.registerServices.apply(server, this.services);
            server.registerServices(...this.services);
        });

        this.genUUIDIfNotExists();
@ -179,6 +179,7 @@ class TestMatrix {
        this.listOfSpecialCase.forEach(specialCase => {
            const keyCase = specialCase.key;
            const result = Object.keys(keyCase).every(currentKey => {
                // eslint-disable-next-line no-prototype-builtins
                if (this.params.hasOwnProperty(currentKey) === false) {
                    return false;
                }
@ -3,7 +3,7 @@
    "engines": {
        "node": "6.9.5"
    },
    "version": "7.0.1",
    "version": "7.2.0",
    "description": "Common utilities for the S3 project components",
    "main": "index.js",
    "repository": {

@ -21,9 +21,12 @@
    "async": "~2.1.5",
    "debug": "~2.3.3",
    "diskusage": "^0.2.2",
    "ioredis": "2.4.0",
    "ipaddr.js": "1.2.0",
    "joi": "^10.6",
    "level": "~1.6.0",
    "level-sublevel": "~6.6.1",
    "simple-glob": "^0.1",
    "socket.io": "~1.7.3",
    "socket.io-client": "~1.7.3",
    "utf8": "2.1.2",
@ -72,4 +72,20 @@ describe('AuthInfo class constructor', () => {
        const publicUser = new AuthInfo({ canonicalID: constants.publicId });
        assert.strictEqual(publicUser.isRequesterPublicUser(), true);
    });

    it('should have a working isRequesterAServiceAccount() method', () => {
        assert.strictEqual(authInfo.isRequesterAServiceAccount(), false);
        const serviceAccount = new AuthInfo({
            canonicalID: `${constants.zenkoServiceAccount}/clueso` });
        assert.strictEqual(serviceAccount.isRequesterAServiceAccount(), true);
    });

    it('should have a working isRequesterThisServiceAccount() method', () => {
        const serviceAccount = new AuthInfo({
            canonicalID: `${constants.zenkoServiceAccount}/clueso` });
        assert.strictEqual(
            serviceAccount.isRequesterThisServiceAccount('backbeat'), false);
        assert.strictEqual(
            serviceAccount.isRequesterThisServiceAccount('clueso'), true);
    });
});
@ -1,8 +1,7 @@
const assert = require('assert');
const werelogs = require('werelogs');

const validateAuthConfig
    = require('../../../../lib/auth/auth').inMemory.validateAuthConfig;
const AuthLoader = require('../../../../lib/auth/auth').inMemory.AuthLoader;
const ref = require('./sample_authdata.json');

werelogs.configure({

@ -16,7 +15,7 @@ function getParentField(obj, field) {
    for (let i = 0; i < fields.length - 1; ++i) {
        const cur = fields[i];
        const n = Number(cur, 10);
        if (isNaN(n)) {
        if (Number.isNaN(n)) {
            parent = parent[cur];
        } else {
            parent = parent[n];

@ -29,15 +28,19 @@ function getFieldName(field) {
    return field.split('.').pop();
}

function shouldFail(obj, checkSas, done) {
    const res = validateAuthConfig(obj, werelogs, checkSas);
    assert.strictEqual(res, true);
function shouldFail(obj, done) {
    const authLoader = new AuthLoader(werelogs);
    authLoader.addAccounts(obj);
    const res = authLoader.validate();
    assert.strictEqual(res, false);
    done();
}

function shouldSuccess(obj, checkSas, done) {
    const res = validateAuthConfig(obj, werelogs, checkSas);
    assert.strictEqual(res, false);
function shouldSucceed(obj, done) {
    const authLoader = new AuthLoader(werelogs);
    authLoader.addAccounts(obj);
    const res = authLoader.validate();
    assert.strictEqual(res, true);
    done();
}

@ -45,15 +48,15 @@ const should = {
    _exec: undefined,
    missingField: (obj, field, done) => {
        delete getParentField(obj, field)[getFieldName(field)];
        should._exec(obj, true, done);
        should._exec(obj, done);
    },
    modifiedField: (obj, field, value, done) => {
        getParentField(obj, field)[getFieldName(field)] = value;
        should._exec(obj, true, done);
        should._exec(obj, done);
    },
};

describe('S3 AuthData Checker', () => {
describe('AuthLoader class', () => {
    let obj = {};

    beforeEach(done => {

@ -71,18 +74,10 @@ describe('S3 AuthData Checker', () => {
        ['accounts.0.email', 64],
        ['accounts.0.arn', undefined],
        ['accounts.0.arn', 64],
        ['accounts.0.sasToken', undefined],
        ['accounts.0.sasToken', 64],
        ['accounts.0.canonicalID', undefined],
        ['accounts.0.canonicalID', 64],
        ['accounts.0.users', 'not an object'],
        ['accounts.0.users.0.arn', undefined],
        ['accounts.0.users.0.arn', 64],
        ['accounts.0.users.0.email', undefined],
        ['accounts.0.users.0.email', 64],
        ['accounts.0.users.0.keys', undefined],
        ['accounts.0.users.0.keys', 'not an Array'],
        ['accounts.0.keys', 'not an Array'],
        ['accounts.0.keys', undefined],
    ].forEach(test => {
        if (test[1] === undefined) {
            // Check a failure when deleting required fields

@ -93,7 +88,8 @@ describe('S3 AuthData Checker', () => {
        } else {
            // Check a failure when the type of field is different than
            // expected
            it(`should fail when modified field ${test[0]}${test[1]}`, done => {
            it(`should fail when modified field ${test[0]} ${test[1]}`,
            done => {
                should._exec = shouldFail;
                should.modifiedField(obj, test[0], test[1], done);
            });

@ -109,52 +105,30 @@ describe('S3 AuthData Checker', () => {
        'accounts.0.users',
    ].forEach(test => {
        // Check a success when deleting optional fields
        it(`should success when missing field ${test[0]}`, done => {
            should._exec = shouldSuccess;
        it(`should return success when missing field ${test}`, done => {
            should._exec = shouldSucceed;
            should.missingField(obj, test[0], done);
        });
    });

    it('Should return success if no sasToken and checkSas false', done => {
        obj.accounts[0].sasToken = undefined;
        shouldSuccess(obj, false, done);
    });

    it('Should return error on two same sasTokens and checkSas true', done => {
        obj.accounts[0].sasToken = obj.accounts[1].sasToken;
        shouldFail(obj, true, done);
    });

    it('Should return success on two same sasTokens and checkSas false',
    done => {
        obj.accounts[0].sasToken = obj.accounts[1].sasToken;
        shouldSuccess(obj, false, done);
    });

    it('Should return error on two same canonicalID', done => {
        obj.accounts[0].canonicalID = obj.accounts[1].canonicalID;
        shouldFail(obj, null, done);
        shouldFail(obj, done);
    });

    it('Should return error on two same emails, account-account', done => {
    it('Should return error on two same emails', done => {
        obj.accounts[0].email = obj.accounts[1].email;
        shouldFail(obj, null, done);
    });

    it('Should return error on two same emails account-user', done => {
        obj.accounts[0].users[0].email = obj.accounts[1].email;
        shouldFail(obj, null, done);
        shouldFail(obj, done);
    });

    it('Should return error on two same arn', done => {
        obj.accounts[0].arn = obj.accounts[0].users[0].arn;
        shouldFail(obj, null, done);
        obj.accounts[0].arn = obj.accounts[1].arn;
        shouldFail(obj, done);
    });

    it('Should return error on two same access key', done => {
        obj.accounts[0].keys[0].access =
            obj.accounts[0].users[0].keys[0].access;
        shouldFail(obj, null, done);
        obj.accounts[0].keys[0].access = obj.accounts[1].keys[0].access;
        shouldFail(obj, done);
    });
});
@ -1,47 +0,0 @@
const assert = require('assert');
const Backend = require('../../../../lib/auth/auth').inMemory.backend.s3;
const DummyRequestLogger = require('../../helpers').DummyRequestLogger;
const authData = require('./sample_authdata');

const backend = new Backend(JSON.parse(JSON.stringify(authData)));
const counter = 10;
// eslint-disable-next-line arrow-body-style
const specificResource = [...Array(counter).keys()].map(i => {
    return {
        key: `key${i}`,
    };
});
const generalResource = 'bucketName';

const requestContexts = {
    constantParams: {
        generalResource,
    },
    parameterize: {
        specificResource,
    },
};
const service = 's3';
const userArn = 'aws::iam:123456789012:root';
const log = new DummyRequestLogger();
// eslint-disable-next-line arrow-body-style
const expectedResults = specificResource.map(entry => {
    return {
        isAllowed: true,
        arn: `arn:aws:${service}:::${generalResource}/${entry.key}`,
        versionId: undefined,
    };
});

describe('S3AuthBackend.checkPolicies', () => {
    it(' should mock successful results', done => {
        backend.checkPolicies(requestContexts, userArn, log,
            (err, vaultReturnObject) => {
                assert.strictEqual(err, null, `Unexpected err: ${err}`);
                assert.deepStrictEqual(vaultReturnObject, {
                    message: { body: expectedResults },
                });
                return done();
            });
    });
});
@ -2,7 +2,7 @@ const assert = require('assert');

const Indexer = require('../../../../lib/auth/in_memory/Indexer');
const ref = require('./sample_authdata.json');
const { should } = require('./validateAuthConfig');
const { should } = require('./AuthLoader.spec');

describe('S3 AuthData Indexer', () => {
    let obj = {};

@ -28,15 +28,6 @@ describe('S3 AuthData Indexer', () => {
        done();
    });

    it('Should return user from email', done => {
        const res = index.getEntityByEmail(obj.accounts[0].users[0].email);
        assert.strictEqual(typeof res, 'object');
        assert.strictEqual(res.arn, obj.accounts[0].arn);
        assert.strictEqual(res.IAMdisplayName,
            obj.accounts[0].users[0].name);
        done();
    });

    it('Should return account from key', done => {
        const res = index.getEntityByKey(obj.accounts[0].keys[0].access);
        assert.strictEqual(typeof res, 'object');

@ -44,16 +35,6 @@ describe('S3 AuthData Indexer', () => {
        done();
    });

    it('Should return user from key', done => {
        const res = index.getEntityByKey(obj.accounts[0].users[0].keys[0]
            .access);
        assert.strictEqual(typeof res, 'object');
        assert.strictEqual(res.arn, obj.accounts[0].arn);
        assert.strictEqual(res.IAMdisplayName,
            obj.accounts[0].users[0].name);
        done();
    });

    it('should index account without keys', done => {
        should._exec = () => {
            index = new Indexer(obj);
@ -2,44 +2,22 @@
    "accounts": [{
        "name": "Bart",
        "email": "sampleaccount1@sampling.com",
        "arn": "aws::iam:123456789012:root",
        "arn": "arn:aws:iam::123456789012:root",
        "canonicalID": "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be",
        "shortid": "123456789012",
        "keys": [{
            "access": "accessKey1",
            "secret": "verySecretKey1"
        }],
        "users": [{
            "name": "Bart Jr",
            "email": "user1.sampleaccount2@sampling.com",
            "arn": "aws::iam:123456789013:bart",
            "keys": [{
                "access": "USERBARTFUNACCESSKEY",
                "secret": "verySecretKey1"
            }]
        }],
        "sasToken": "test0"
    }, {
        "name": "Lisa",
        "email": "sampleaccount2@sampling.com",
        "arn": "aws::iam:accessKey2:user/Lisa",
        "arn": "arn:aws:iam::123456789013:root",
        "canonicalID": "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2bf",
        "shortid": "123456789012",
        "shortid": "123456789013",
        "keys": [{
            "access": "accessKey2",
            "secret": "verySecretKey2"
        }],
        "sasToken": "test1"
    }, {
        "name": "Docker",
        "email": "sampleaccount3@sampling.com",
        "arn": "aws::iam:accessKeyDocker:user/Docker",
        "canonicalID": "sd359df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47eh3hd",
        "shortid": "123456789012",
        "keys": [{
            "access": "accessKeyDocker",
            "secret": "verySecretKeyDocker"
        }],
        "sasToken": "test2"
    }]
    }]
}
@ -2,13 +2,12 @@
    "accounts": [{
        "name": "Zenko",
        "email": "sampleaccount4@sampling.com",
        "arn": "aws::iam:accessKeyZenko:user/Zenko",
        "arn": "aws::iam:123456789015:root",
        "canonicalID": "newCanId",
        "shortid": "123456789012",
        "shortid": "123456789015",
        "keys": [{
            "access": "accessKeyZenko",
            "secret": "verySecretKeyZenko"
        }],
        "sasToken": "test2"
    }]
    }]
}
@@ -7,16 +7,20 @@ const constructStringToSign =
 const DummyRequestLogger = require('../../helpers').DummyRequestLogger;

 const log = new DummyRequestLogger();

-describe('constructStringToSign function', () => {
+[
+    { path: '', desc: 'constructStringToSign function' },
+    { path: '/_/proxy', desc: 'constructStringToSign function with proxy' },
+].forEach(item => {
+    describe(item.desc, () => {
         // Example taken from: http://docs.aws.amazon.com/AmazonS3/
         // latest/API/sig-v4-header-based-auth.html
         it('should construct a stringToSign in accordance ' +
            'with AWS rules for a get object request (header auth)', () => {
+            const path = '/test.txt';
             const params = {
                 request: {
                     method: 'GET',
-                    path: '/test.txt',
+                    path: `${item.path}${path}`,
                     headers: {
                         'host': 'examplebucket.s3.amazonaws.com',
                         'x-amz-date': '20130524T000000Z',
@@ -38,11 +42,13 @@ describe('constructStringToSign function', () => {
                 credentialScope: '20130524/us-east-1/s3/aws4_request',
                 timestamp: '20130524T000000Z',
                 log,
+                proxyPath: item.path ? path : undefined,
             };
             const expectedOutput = 'AWS4-HMAC-SHA256\n' +
                 '20130524T000000Z\n' +
                 '20130524/us-east-1/s3/aws4_request\n' +
-                '7344ae5b7ee6c3e7e6b0fe0640412a37625d1fbfff95c48bbb2dc43964946972';
+                '7344ae5b7ee6c3e7e6b0fe0640412a37625d1fbfff95c48bbb2dc439649' +
+                '46972';
             const actualOutput = constructStringToSign(params);
             assert.strictEqual(actualOutput, expectedOutput);
         });
@@ -51,10 +57,11 @@ describe('constructStringToSign function', () => {
         // latest/API/sig-v4-header-based-auth.html
         it('should construct a stringToSign in accordance ' +
            'with AWS rules for a put object request (header auth)', () => {
+            const path = '/test$file.text';
             const params = {
                 request: {
                     method: 'PUT',
-                    path: '/test$file.text',
+                    path: `${item.path}${path}`,
                     headers: {
                         'date': 'Fri, 24 May 2013 00:00:00 GMT',
                         'host': 'examplebucket.s3.amazonaws.com',
@@ -78,6 +85,7 @@ describe('constructStringToSign function', () => {
                 credentialScope: '20130524/us-east-1/s3/aws4_request',
                 timestamp: '20130524T000000Z',
                 log,
+                proxyPath: item.path ? path : undefined,
             };
             const expectedOutput = 'AWS4-HMAC-SHA256\n' +
                 '20130524T000000Z\n' +
@@ -91,11 +99,13 @@ describe('constructStringToSign function', () => {
         // Example taken from: http://docs.aws.amazon.com/AmazonS3/
         // latest/API/sig-v4-header-based-auth.html
         it('should construct a stringToSign in accordance ' +
-           'with AWS rules for a pre-signed get url request (query auth)', () => {
+           'with AWS rules for a pre-signed get url request (query auth)',
+        () => {
+            const path = '/test.txt';
             const params = {
                 request: {
                     method: 'GET',
-                    path: '/test.txt',
+                    path: `${item.path}${path}`,
                     headers: {
                         host: 'examplebucket.s3.amazonaws.com',
                     },
@@ -113,6 +123,7 @@ describe('constructStringToSign function', () => {
                 credentialScope: '20130524/us-east-1/s3/aws4_request',
                 timestamp: '20130524T000000Z',
                 log,
+                proxyPath: item.path ? path : undefined,
             };
             const expectedOutput = 'AWS4-HMAC-SHA256\n' +
                 '20130524T000000Z\n' +
@@ -123,3 +134,4 @@ describe('constructStringToSign function', () => {
             assert.strictEqual(actualOutput, expectedOutput);
         });
     });
+});
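Each case above feeds constructStringToSign the same parameter object; the new proxyPath field carries the unprefixed object path to sign when the request arrived through the `/_/proxy` route. A minimal call sketch, restricted to the fields visible in this diff (anything hidden by the hunk context is omitted here):

// Sketch: parameter shape for a proxied GET, per the cases above.
const params = {
    request: {
        method: 'GET',
        path: '/_/proxy/test.txt', // path as received, proxy-prefixed
        headers: {
            'host': 'examplebucket.s3.amazonaws.com',
            'x-amz-date': '20130524T000000Z',
        },
    },
    credentialScope: '20130524/us-east-1/s3/aws4_request',
    timestamp: '20130524T000000Z',
    log,
    proxyPath: '/test.txt', // the real object path that gets signed
};
const stringToSign = constructStringToSign(params);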
@@ -0,0 +1,78 @@
'use strict'; // eslint-disable-line strict

const assert = require('assert');

const RedisClient = require('../../../lib/metrics/RedisClient');
const StatsClient = require('../../../lib/metrics/StatsClient');

// setup redis client
const config = {
    host: '127.0.0.1',
    port: 6379,
    enableOfflineQueue: false,
};
const fakeLogger = {
    trace: () => {},
    error: () => {},
};
const redisClient = new RedisClient(config, fakeLogger);

// setup stats client
const STATS_INTERVAL = 5; // 5 seconds
const STATS_EXPIRY = 30; // 30 seconds
const statsClient = new StatsClient(redisClient, STATS_INTERVAL, STATS_EXPIRY);

describe('StatsClient class', () => {
    const id = 'arsenal-test';

    afterEach(() => redisClient.clear(() => {}));

    it('should correctly record a new request', () => {
        statsClient.reportNewRequest(id, (err, res) => {
            assert.ifError(err);
            assert(Array.isArray(res));
            assert.equal(res.length, 2);

            const expected = [[null, 1], [null, 1]];
            assert.deepEqual(res, expected);
        });

        statsClient.reportNewRequest(id, (err, res) => {
            assert.ifError(err);
            assert(Array.isArray(res));
            assert.equal(res.length, 2);

            const expected = [[null, 2], [null, 1]];
            assert.deepEqual(res, expected);
        });
    });

    it('should correctly record a 500 on the server', () => {
        statsClient.report500(id, (err, res) => {
            assert.ifError(err);
            assert(Array.isArray(res));
            assert.equal(res.length, 2);

            const expected = [[null, 1], [null, 1]];
            assert.deepEqual(res, expected);
        });
    });

    it('should respond back with total requests', () => {
        statsClient.reportNewRequest(id, err => {
            assert.ifError(err);
        });
        statsClient.report500(id, err => {
            assert.ifError(err);
        });
        statsClient.getStats(fakeLogger, id, (err, res) => {
            assert.ifError(err);
            assert.equal(typeof res, 'object');
            assert.equal(Object.keys(res).length, 3);
            assert.equal(res.sampleDuration, STATS_EXPIRY);

            const expected = { 'requests': 1, '500s': 1, 'sampleDuration': 30 };
            assert.deepEqual(res, expected);
        });
    });
});
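Together these assertions pin down the client's contract: each report returns the two-element result of a Redis multi (incremented counter, expiry ack), and getStats aggregates requests and 500s over the expiry window. A usage sketch reusing the clients wired up in the spec above (a local Redis on 6379 is assumed):

// Sketch: reuses statsClient/fakeLogger from the spec above.
statsClient.reportNewRequest('my-service', err => {
    if (err) { throw err; }
    statsClient.getStats(fakeLogger, 'my-service', (err, res) => {
        if (err) { throw err; }
        // res -> { requests: 1, '500s': 0, sampleDuration: 30 }
        console.log(res);
    });
});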
@@ -0,0 +1,136 @@
const assert = require('assert');

const ARN = require('../../../lib/models/ARN');

describe('ARN object model', () => {
    describe('valid ARNs', () => {
        [{ arn: 'arn:aws:iam::123456789012:role/backbeat',
            service: 'iam',
            accountId: '123456789012',
            resource: 'role/backbeat',
            isIAMAccount: false,
            isIAMUser: false,
            isIAMRole: true,
        },
        { arn: 'arn:aws:iam::*:role/backbeat',
            service: 'iam',
            accountId: '*',
            resource: 'role/backbeat',
            isIAMAccount: false,
            isIAMUser: false,
            isIAMRole: true,
        },
        { arn: 'arn:aws:iam:::role/backbeat',
            service: 'iam',
            accountId: null,
            resource: 'role/backbeat',
            isIAMAccount: false,
            isIAMUser: false,
            isIAMRole: false, // not a valid role without an account ID
        },
        { arn: 'arn:aws:iam::123456789012:user/bart',
            service: 'iam',
            accountId: '123456789012',
            resource: 'user/bart',
            isIAMAccount: false,
            isIAMUser: true,
            isIAMRole: false,
        },
        { arn: 'arn:aws:iam:::user/bart',
            service: 'iam',
            accountId: null,
            resource: 'user/bart',
            isIAMAccount: false,
            isIAMUser: false, // not a valid user without an account ID
            isIAMRole: false,
        },
        { arn: 'arn:aws:iam::123456789012:root',
            service: 'iam',
            accountId: '123456789012',
            resource: 'root',
            isIAMAccount: true,
            isIAMUser: false,
            isIAMRole: false,
        },
        { arn: 'arn:aws:iam:::root',
            service: 'iam',
            accountId: null,
            resource: 'root',
            isIAMAccount: false, // not a valid account without an account ID
            isIAMUser: false,
            isIAMRole: false,
        },
        { arn: 'arn:aws:s3::123456789012:foo/bar/baz/qux',
            service: 's3',
            accountId: '123456789012',
            resource: 'foo/bar/baz/qux',
            isIAMAccount: false,
            isIAMUser: false,
            isIAMRole: false,
        },
        { arn: 'arn:aws:s3::123456789012:foo:bar/baz/qux',
            service: 's3',
            accountId: '123456789012',
            resource: 'foo:bar/baz/qux',
            isIAMAccount: false,
            isIAMUser: false,
            isIAMRole: false,
        },
        { arn: 'arn:aws:sts::123456789012:foobar',
            service: 'sts',
            accountId: '123456789012',
            resource: 'foobar',
            isIAMAccount: false,
            isIAMUser: false,
            isIAMRole: false,
        },
        { arn: 'arn:aws:ring::123456789012:foobar',
            service: 'ring',
            accountId: '123456789012',
            resource: 'foobar',
            isIAMAccount: false,
            isIAMUser: false,
            isIAMRole: false,
        },
        { arn: 'arn:scality:utapi::123456789012:foobar',
            service: 'utapi',
            accountId: '123456789012',
            resource: 'foobar',
            isIAMAccount: false,
            isIAMUser: false,
            isIAMRole: false,
        },
        { arn: 'arn:scality:sso::123456789012:foobar',
            service: 'sso',
            accountId: '123456789012',
            resource: 'foobar',
            isIAMAccount: false,
            isIAMUser: false,
            isIAMRole: false,
        },
        ].forEach(arnTest => it(`should accept ARN "${arnTest.arn}"`, () => {
            const arnObj = ARN.createFromString(arnTest.arn);
            assert(arnObj instanceof ARN);
            assert.strictEqual(arnObj.getService(), arnTest.service);
            assert.strictEqual(arnObj.getAccountId(), arnTest.accountId);
            assert.strictEqual(arnObj.getResource(), arnTest.resource);
            assert.strictEqual(arnObj.isIAMAccount(), arnTest.isIAMAccount);
            assert.strictEqual(arnObj.isIAMUser(), arnTest.isIAMUser);
            assert.strictEqual(arnObj.isIAMRole(), arnTest.isIAMRole);
        }));
    });
    describe('bad ARNs', () => {
        ['',
         ':',
         'foo:',
         'arn::iam::123456789012:role/backbeat',
         'arn:aws:xxx::123456789012:role/backbeat',
         'arn:aws:s3::123456789012345:role/backbeat',
         'arn:aws:s3::12345678901b:role/backbeat',
        ].forEach(arn => it(`should fail with invalid ARN "${arn}"`, () => {
            const res = ARN.createFromString(arn);
            assert.notStrictEqual(res.error, undefined);
        }));
    });
});
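A compact sketch of the API these cases exercise; judging by the bad-ARN cases, createFromString reports invalid input through an error property on the result rather than throwing:

// Sketch: parsing and inspecting an ARN, per the cases above.
const arnObj = ARN.createFromString('arn:aws:iam::123456789012:role/backbeat');
if (arnObj.error) {
    // invalid ARN: handle the error result
} else {
    arnObj.getService();    // 'iam'
    arnObj.getAccountId();  // '123456789012'
    arnObj.getResource();   // 'role/backbeat'
    arnObj.isIAMRole();     // true
    arnObj.toString();      // the original ARN string
}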
@@ -1,5 +1,6 @@
 const assert = require('assert');
 const ObjectMD = require('../../../lib/models/ObjectMD');
+const constants = require('../../../lib/constants');

 describe('ObjectMD class setters/getters', () => {
     let md = null;
@@ -10,7 +11,6 @@ describe('ObjectMD class setters/getters', () => {

     [
         // In order: data property, value to set/get, default value
-        ['ModelVersion', null, 3],
         ['OwnerDisplayName', null, ''],
         ['OwnerDisplayName', 'owner-display-name'],
         ['OwnerId', null, ''],
@@ -79,6 +79,8 @@ describe('ObjectMD class setters/getters', () => {
             destination: '',
             storageClass: '',
             role: '',
+            storageType: '',
+            dataStoreVersionId: '',
         }],
         ['ReplicationInfo', {
             status: 'PENDING',
@@ -87,6 +89,8 @@ describe('ObjectMD class setters/getters', () => {
             storageClass: 'STANDARD',
             role: 'arn:aws:iam::account-id:role/src-resource,' +
                 'arn:aws:iam::account-id:role/dest-resource',
+            storageType: 'aws_s3',
+            dataStoreVersionId: 'QWY1QQwWn9xJcoz0EgJjJ_t8g4nMYsxo',
         }],
         ['DataStoreName', null, ''],
     ].forEach(test => {
@@ -110,3 +114,91 @@ describe('ObjectMD class setters/getters', () => {
         });
     });
 });
+
+describe('ObjectMD import from stored blob', () => {
+    it('should export and import correctly the latest model version', () => {
+        const md = new ObjectMD();
+        const jsonMd = md.getSerialized();
+        const importedRes = ObjectMD.createFromBlob(jsonMd);
+        assert.ifError(importedRes.error);
+        const importedMd = importedRes.result;
+        assert.deepStrictEqual(md, importedMd);
+    });
+
+    it('should convert old location to new location', () => {
+        const md = new ObjectMD();
+        const value = md.getValue();
+        value['md-model-version'] = 1;
+        value.location = 'stringLocation';
+        const jsonMd = JSON.stringify(value);
+        const importedRes = ObjectMD.createFromBlob(jsonMd);
+        assert.strictEqual(importedRes.error, undefined);
+        const importedMd = importedRes.result;
+        const valueImported = importedMd.getValue();
+        assert.strictEqual(valueImported['md-model-version'],
+            constants.mdModelVersion);
+        assert.deepStrictEqual(valueImported.location,
+            [{ key: 'stringLocation' }]);
+    });
+
+    it('should keep null location as is', () => {
+        const md = new ObjectMD();
+        const value = md.getValue();
+        value.location = null;
+        const jsonMd = JSON.stringify(value);
+        const importedRes = ObjectMD.createFromBlob(jsonMd);
+        assert.strictEqual(importedRes.error, undefined);
+        const importedMd = importedRes.result;
+        const valueImported = importedMd.getValue();
+        assert.deepStrictEqual(valueImported.location, null);
+        importedMd.setLocation([]);
+        assert.deepStrictEqual(importedMd.getValue().location, null);
+    });
+
+    it('should add dataStoreName attribute if missing', () => {
+        const md = new ObjectMD();
+        const value = md.getValue();
+        value['md-model-version'] = 2;
+        delete value.dataStoreName;
+        const jsonMd = JSON.stringify(value);
+        const importedRes = ObjectMD.createFromBlob(jsonMd);
+        assert.strictEqual(importedRes.error, undefined);
+        const importedMd = importedRes.result;
+        const valueImported = importedMd.getValue();
+        assert.strictEqual(valueImported['md-model-version'],
+            constants.mdModelVersion);
+        assert.notStrictEqual(valueImported.dataStoreName, undefined);
+    });
+
+    it('should return undefined for dataStoreVersionId if no object location',
+    () => {
+        const md = new ObjectMD();
+        const value = md.getValue();
+        const jsonMd = JSON.stringify(value);
+        const importedRes = ObjectMD.createFromBlob(jsonMd);
+        assert.strictEqual(importedRes.error, undefined);
+        const importedMd = importedRes.result;
+        assert.strictEqual(importedMd.getDataStoreVersionId(), undefined);
+    });
+
+    it('should get dataStoreVersionId if saved in object location', () => {
+        const md = new ObjectMD();
+        const dummyLocation = {
+            dataStoreVersionId: 'data-store-version-id',
+        };
+        md.setLocation([dummyLocation]);
+        const value = md.getValue();
+        const jsonMd = JSON.stringify(value);
+        const importedRes = ObjectMD.createFromBlob(jsonMd);
+        assert.strictEqual(importedRes.error, undefined);
+        const importedMd = importedRes.result;
+        assert.strictEqual(importedMd.getDataStoreVersionId(),
+            dummyLocation.dataStoreVersionId);
+    });
+
+    it('should return an error if blob is malformed JSON', () => {
+        const importedRes = ObjectMD.createFromBlob('{BAD JSON}');
+        assert.notStrictEqual(importedRes.error, undefined);
+        assert.strictEqual(importedRes.result, undefined);
+    });
+});
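The import path above doubles as the metadata migration hook: blobs stored under older model versions get their fields upgraded on load. A round-trip sketch using only the calls exercised by these tests:

// Sketch: serialize, re-import, and read back object metadata.
const md = new ObjectMD();
md.setLocation([{ dataStoreVersionId: 'data-store-version-id' }]);

const blob = md.getSerialized();
const res = ObjectMD.createFromBlob(blob);
if (res.error) {
    // malformed blob: res.result is undefined
} else {
    const imported = res.result;
    imported.getDataStoreVersionId(); // 'data-store-version-id'
}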
@@ -9,7 +9,7 @@ describe('round robin hosts', () => {
     caption: 'with { host, port } objects in list',
     hostsList: [{ host: '1.2.3.0', port: 1000 },
         { host: '1.2.3.1', port: 1001 },
-        { host: '1.2.3.2', port: 1002 }],
+        { host: '1.2.3.2' }],
 }, {
     caption: 'with "host:port" strings in list',
     hostsList: ['1.2.3.0:1000',
@@ -18,7 +18,8 @@ describe('round robin hosts', () => {
 }].forEach(testCase => describe(testCase.caption, () => {
     beforeEach(() => {
         roundRobin = new RoundRobin(testCase.hostsList,
-            { stickyCount: 10 });
+            { stickyCount: 10,
+              defaultPort: 1002 });
     });

     it('should pick all hosts in turn', () => {
@@ -31,8 +32,7 @@ describe('round robin hosts', () => {
         // expect 3 loops of 10 times each of the 3 hosts
         for (let i = 0; i < 90; ++i) {
             const hostItem = roundRobin.pickHost();
-            hostsPickCount[hostItem.host] =
-                hostsPickCount[hostItem.host] + 1;
+            hostsPickCount[hostItem.host] += 1;
         }
         assert.strictEqual(hostsPickCount['1.2.3.0'], 30);
         assert.strictEqual(hostsPickCount['1.2.3.1'], 30);
@@ -51,8 +51,7 @@ describe('round robin hosts', () => {
         const curHost = roundRobin.getCurrentHost();
         for (let i = 0; i < 10; ++i) {
             const hostItem = roundRobin.pickHost();
-            hostsPickCount[hostItem.host] =
-                hostsPickCount[hostItem.host] + 1;
+            hostsPickCount[hostItem.host] += 1;
         }
         assert.strictEqual(hostsPickCount[curHost.host], 10);
     });
@@ -67,8 +66,7 @@ describe('round robin hosts', () => {
         // expect each host to be picked up 3 times
         for (let i = 0; i < 9; ++i) {
             const hostItem = roundRobin.pickNextHost();
-            hostsPickCount[hostItem.host] =
-                hostsPickCount[hostItem.host] + 1;
+            hostsPickCount[hostItem.host] += 1;
         }
         assert.strictEqual(hostsPickCount['1.2.3.0'], 3);
         assert.strictEqual(hostsPickCount['1.2.3.1'], 3);
@@ -101,5 +99,18 @@ describe('round robin hosts', () => {
         // eslint-disable-next-line no-new
         new RoundRobin(['zenko.io', 'zenka.ia']);
     });
+
+    it('should have set default port if not in bootstrap list', () => {
+        // hosts listed without an explicit port should get the default port
+        const portMap = {
+            '1.2.3.0': 1000,
+            '1.2.3.1': 1001,
+            '1.2.3.2': 1002,
+        };
+        for (let i = 0; i < 100; ++i) {
+            const hostItem = roundRobin.pickHost();
+            assert.strictEqual(hostItem.port, portMap[hostItem.host]);
+        }
+    });
 }));
 });
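The new option is exercised by the last test: hosts listed without an explicit port get defaultPort, while stickyCount keeps returning the current host for that many consecutive picks. A sketch, reusing the RoundRobin class required by this spec:

// Sketch: defaultPort fills in ports missing from the bootstrap list.
const rr = new RoundRobin(
    [{ host: '1.2.3.0', port: 1000 }, { host: '1.2.3.2' }],
    { stickyCount: 10, defaultPort: 1002 });

const hostItem = rr.pickHost();
// picking '1.2.3.2' yields { host: '1.2.3.2', port: 1002 } (the default)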
@@ -175,7 +175,7 @@ describe('REST interface for blob data storage', () => {
                     const value = resp.read();
                     assert.strictEqual(
                         value.toString(),
-                        contents.slice.apply(contents, sliceArgs));
+                        contents.slice(...sliceArgs));
                     checkContentRange(resp, contentRange[0],
                         contentRange[1]);
                     done();
@@ -16,21 +16,21 @@ const levelNet = require('../../../../lib/network/rpc/level-net');
 // simply forward the API calls to the db as-is
 const dbAsyncAPI = {
     put: (env, ...args) => {
-        env.subDb.put.apply(env.subDb, args);
+        env.subDb.put(...args);
     },
     del: (env, ...args) => {
-        env.subDb.del.apply(env.subDb, args);
+        env.subDb.del(...args);
     },
     get: (env, ...args) => {
-        env.subDb.get.apply(env.subDb, args);
+        env.subDb.get(...args);
     },
     batch: (env, ...args) => {
-        env.subDb.batch.apply(env.subDb, args);
+        env.subDb.batch(...args);
     },
 };
 const dbSyncAPI = {
     createReadStream:
-        (env, ...args) => env.subDb.createReadStream.apply(env.subDb, args),
+        (env, ...args) => env.subDb.createReadStream(...args),
 };

 describe('level-net - LevelDB over network', () => {
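The refactor in this hunk is purely mechanical: `fn.apply(thisArg, args)` and `thisArg.fn(...args)` make the same call when the argument array being spread is the one the rest parameter collected. A worked example:

// Equivalence behind the .apply() -> spread rewrite in this hunk.
const db = {
    get(key, cb) { cb(null, `value-of-${key}`); },
};
const args = ['someKey', (err, value) => console.log(value)];

db.get.apply(db, args); // old style: logs 'value-of-someKey'
db.get(...args);        // new style: the exact same call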
@@ -0,0 +1,647 @@
const assert = require('assert');

const Principal = require('../../../lib/policyEvaluator/principal');
const RequestContext = require('../../../lib/policyEvaluator/RequestContext');

const defaultAccountId = '123456789012';
const anotherAccountId = '098765432112';
const defaultAccountArn = `arn:aws:iam::${defaultAccountId}:root`;
const defaultUserArn = `arn:aws:iam::${defaultAccountId}:user/test`;
const defaultRole = `arn:aws:iam::${defaultAccountId}:role/role1`;
const defaultAssumedRole =
    `arn:aws:sts::${defaultAccountId}:assumed-role/role1/session`;
const defaultSamlProvider =
    `arn:aws:iam::${defaultAccountId}:saml-provider/provider1`;
const defaultFederatedUser =
    `arn:aws:sts::${defaultAccountId}:federated-user/foo`;
const anotherAccountArn = `arn:aws:iam::${anotherAccountId}:root`;
const anotherUserArn = `arn:aws:iam::${anotherAccountId}:user/test`;
const defaultValids = {
    AWS: [
        defaultAccountId,
        defaultAccountArn,
    ],
};

const defaultParams = {
    log: {
        trace: () => {},
        debug: () => {},
        info: () => {},
    },
};

describe('Principal evaluator', () => {
    [
        {
            name: 'anonymous as Principal (effect Allow) -> grant access',
            statement: {
                Principal: '*',
                Effect: 'Allow',
            },
            valids: defaultValids,
            result: 'Allow',
        },
        {
            name: 'anonymous as Principal (effect Deny) -> deny access',
            statement: {
                Principal: '*',
                Effect: 'Deny',
            },
            valids: defaultValids,
            result: 'Deny',
        },
        {
            name: 'account (arn) in Principal (effect Allow) -> grant access',
            statement: {
                Principal: {
                    AWS: defaultAccountArn,
                },
                Effect: 'Allow',
            },
            valids: defaultValids,
            result: 'Allow',
        },
        {
            name: 'account (arn) in Principal (effect Deny) -> deny access',
            statement: {
                Principal: {
                    AWS: [defaultAccountArn],
                },
                Effect: 'Deny',
            },
            valids: defaultValids,
            result: 'Deny',
        },
        {
            name: 'account (id) in Principal (effect Allow) -> grant access',
            statement: {
                Principal: {
                    AWS: defaultAccountId,
                },
                Effect: 'Allow',
            },
            valids: defaultValids,
            result: 'Allow',
        },
        {
            name: 'account (id) as Principal (effect Deny) -> deny access',
            statement: {
                Principal: {
                    AWS: defaultAccountId,
                },
                Effect: 'Deny',
            },
            valids: defaultValids,
            result: 'Deny',
        },
        {
            name: 'account not in Principal (effect Allow) -> neutral',
            statement: {
                Principal: {
                    AWS: [anotherAccountId],
                },
                Effect: 'Allow',
            },
            valids: defaultValids,
            result: 'Neutral',
        },
        {
            name: 'account not in Principal (effect Deny) -> neutral',
            statement: {
                Principal: {
                    AWS: [anotherAccountId],
                },
                Effect: 'Deny',
            },
            valids: defaultValids,
            result: 'Neutral',
        },
        {
            name:
                'multiple accounts as Principal (effect Allow) -> grant access',
            statement: {
                Principal: {
                    AWS: [anotherAccountId, defaultAccountId],
                },
                Effect: 'Allow',
            },
            valids: defaultValids,
            result: 'Allow',
        },
        {
            name: 'anonymous as NotPrincipal (effect Allow) -> neutral',
            statement: {
                NotPrincipal: '*',
                Effect: 'Allow',
            },
            valids: defaultValids,
            result: 'Neutral',
        },
        {
            name: 'anonymous as NotPrincipal (effect Deny) -> neutral',
            statement: {
                NotPrincipal: '*',
                Effect: 'Deny',
            },
            valids: defaultValids,
            result: 'Neutral',
        },
        {
            name: 'account (arn) in NotPrincipal (effect Allow) -> neutral',
            statement: {
                NotPrincipal: {
                    AWS: defaultAccountArn,
                },
                Effect: 'Allow',
            },
            valids: defaultValids,
            result: 'Neutral',
        },
        {
            name: 'account (arn) in NotPrincipal (effect Deny) -> neutral',
            statement: {
                NotPrincipal: {
                    AWS: [anotherAccountArn, defaultAccountArn],
                },
                Effect: 'Deny',
            },
            valids: defaultValids,
            result: 'Neutral',
        },
        {
            name: 'account (arn) not in NotPrincipal (effect Allow) -> ' +
                'grant access',
            statement: {
                NotPrincipal: {
                    AWS: anotherAccountArn,
                },
                Effect: 'Allow',
            },
            valids: defaultValids,
            result: 'Allow',
        },
        {
            name: 'account (arn) not in NotPrincipal (effect Deny) -> ' +
                'deny access',
            statement: {
                NotPrincipal: {
                    AWS: anotherAccountArn,
                },
                Effect: 'Deny',
            },
            valids: defaultValids,
            result: 'Deny',
        },
        {
            name: 'Other entities than AWS in principal (effect Allow) -> ' +
                'neutral',
            statement: {
                Principal: {
                    Service: 'backbeat',
                },
                Effect: 'Allow',
            },
            valids: defaultValids,
            result: 'Neutral',
        },
        {
            name: 'Other entities than AWS in principal (effect Deny) -> ' +
                'neutral',
            statement: {
                Principal: {
                    Service: 'backbeat',
                },
                Effect: 'Deny',
            },
            valids: defaultValids,
            result: 'Neutral',
        },
        {
            name: 'Service in Principal (effect Allow) -> grant access',
            statement: {
                Principal: {
                    Service: 'backbeat',
                },
                Effect: 'Allow',
            },
            valids: {
                Service: 'backbeat',
            },
            result: 'Allow',
        },
        {
            name: 'User as principal (effect Allow) -> grant access',
            statement: {
                Principal: {
                    AWS: `arn:aws:iam::${defaultAccountId}:user/test`,
                },
                Effect: 'Allow',
            },
            valids: {
                AWS: `arn:aws:iam::${defaultAccountId}:user/test`,
            },
            result: 'Allow',
        },
        {
            name: 'User not in Principal (effect Allow) -> neutral',
            statement: {
                Principal: {
                    AWS: `arn:aws:iam::${defaultAccountId}:user/test`,
                },
                Effect: 'Allow',
            },
            valids: {
                AWS: `arn:aws:iam::${defaultAccountId}:user/another/testUser`,
            },
            result: 'Neutral',
        },
        {
            name: 'Role in Principal (effect Allow) -> grant access',
            statement: {
                Principal: {
                    AWS: `arn:aws:iam::${defaultAccountId}:role/role1`,
                },
                Effect: 'Allow',
            },
            valids: {
                AWS: [
                    `arn:aws:iam::${defaultAccountId}:role/role1`,
                    `arn:aws:iam::${defaultAccountId}:assumed-role` +
                        '/role1/session',
                ],
            },
            result: 'Allow',
        },
        {
            name: 'Role in Principal (effect Deny) -> deny access',
            statement: {
                Principal: {
                    AWS: `arn:aws:iam::${defaultAccountId}:role/role1`,
                },
                Effect: 'Deny',
            },
            valids: {
                AWS: [
                    `arn:aws:iam::${defaultAccountId}:role/role1`,
                    `arn:aws:iam::${defaultAccountId}:assumed-role` +
                        '/role1/session',
                ],
            },
            result: 'Deny',
        },
    ].forEach(test => {
        it(`_evaluatePrincipalField(): ${test.name}`, () => {
            assert.strictEqual(Principal._evaluatePrincipalField(defaultParams,
                test.statement, test.valids), test.result);
        });
    });

    [
        {
            name: 'should allow with a neutral',
            statement: [
                {
                    Principal: {
                        AWS: anotherAccountArn,
                    },
                    Effect: 'Deny',
                },
                {
                    Principal: {
                        AWS: defaultAccountArn,
                    },
                    Effect: 'Allow',
                },
            ],
            valids: defaultValids,
            result: 'Allow',
        },
        {
            name: 'should deny even with an allow',
            statement: [
                {
                    Principal: {
                        AWS: defaultAccountArn,
                    },
                    Effect: 'Allow',
                },
                {
                    Principal: {
                        AWS: defaultAccountArn,
                    },
                    Effect: 'Deny',
                },
            ],
            valids: defaultValids,
            result: 'Deny',
        },
        {
            name: 'should deny if no matches',
            statement: [
                {
                    Principal: {
                        AWS: anotherAccountArn,
                    },
                    Effect: 'Allow',
                },
            ],
            valids: defaultValids,
            result: 'Deny',
        },
    ].forEach(test => {
        it(`_evaluatePrincipal(): ${test.name}`, () => {
            const params = {
                log: defaultParams.log,
                trustedPolicy: {
                    Statement: test.statement,
                },
            };
            const valids = test.valids;
            assert.strictEqual(Principal._evaluatePrincipal(params, valids),
                test.result);
        });
    });

    [
        {
            name: 'should check user inside the same account',
            statement: [
                {
                    Principal: {
                        AWS: defaultUserArn,
                    },
                    Effect: 'Allow',
                },
            ],
            requester: {
                accountId: defaultAccountId,
                arn: defaultUserArn,
                parentArn: null,
                userType: 'User',
            },
            target: {
                accountId: defaultAccountId,
            },
            result: {
                result: 'Allow',
                checkAction: false,
            },
        },
        {
            name: 'should deny user inside the same account',
            statement: [
                {
                    Principal: {
                        AWS: defaultUserArn,
                    },
                    Effect: 'Allow',
                },
            ],
            requester: {
                accountId: defaultAccountId,
                arn: `arn:aws:iam::${defaultAccountId}:user/anotherUser`,
                parentArn: null,
                userType: 'User',
            },
            target: {
                accountId: defaultAccountId,
            },
            result: {
                result: 'Deny',
                checkAction: false,
            },
        },
        {
            name: 'should deny principal if account is deny',
            statement: [
                {
                    Principal: {
                        AWS: defaultAccountId,
                    },
                    Effect: 'Deny',
                },
                {
                    Principal: {
                        AWS: defaultUserArn,
                    },
                    Effect: 'Allow',
                },
            ],
            requester: {
                accountId: defaultAccountId,
                arn: defaultUserArn,
                parentArn: null,
                userType: 'User',
            },
            target: {
                accountId: defaultAccountId,
            },
            result: {
                result: 'Deny',
                checkAction: false,
            },
        },
        {
            name: 'should deny assumed role if role is deny',
            statement: [
                {
                    Principal: {
                        AWS: defaultRole,
                    },
                    Effect: 'Deny',
                },
                {
                    Principal: {
                        AWS: defaultAssumedRole,
                    },
                    Effect: 'Allow',
                },
            ],
            requester: {
                accountId: defaultAccountId,
                arn: defaultAssumedRole,
                parentArn: defaultRole,
                userType: 'AssumedRole',
            },
            target: {
                accountId: defaultAccountId,
            },
            result: {
                result: 'Deny',
                checkAction: false,
            },
        },
        {
            name: 'should deny user as principal if account is different',
            statement: [
                {
                    Principal: {
                        AWS: anotherUserArn,
                    },
                    Effect: 'Allow',
                },
            ],
            requester: {
                accountId: anotherAccountId,
                arn: anotherUserArn,
                parentArn: null,
                userType: 'User',
            },
            target: {
                accountId: defaultAccountId,
            },
            result: {
                result: 'Deny',
                checkAction: true,
            },
        },
        {
            name: 'should allow user if account is in principal',
            statement: [
                {
                    Principal: {
                        AWS: anotherAccountArn,
                    },
                    Effect: 'Allow',
                },
            ],
            requester: {
                accountId: anotherAccountId,
                arn: anotherUserArn,
                parentArn: null,
                userType: 'User',
            },
            target: {
                accountId: defaultAccountId,
            },
            result: {
                result: 'Allow',
                checkAction: true,
            },
        },
        {
            name: 'should allow service as principal',
            statement: [
                {
                    Principal: {
                        Service: 'backbeat',
                    },
                    Effect: 'Allow',
                },
            ],
            requester: {
                accountId: defaultAccountId,
                arn: 'backbeat',
                parentArn: null,
                userType: 'Service',
            },
            target: {
                accountId: defaultAccountId,
            },
            result: {
                result: 'Allow',
                checkAction: false,
            },
        },
        {
            name: 'should allow federated provider',
            statement: [
                {
                    Principal: {
                        Federated: defaultSamlProvider,
                    },
                    Effect: 'Allow',
                },
            ],
            requester: {
                accountId: defaultAccountId,
                arn: defaultFederatedUser,
                parentArn: defaultSamlProvider,
                userType: 'Federated',
            },
            target: {
                accountId: defaultAccountId,
            },
            result: {
                result: 'Allow',
                checkAction: false,
            },
        },
        {
            name: 'should not allow when external id not matching',
            statement: [
                {
                    Principal: {
                        AWS: anotherAccountId,
                    },
                    Effect: 'Allow',
                    Condition: {
                        StringEquals: { 'sts:ExternalId': '12345' },
                    },
                },
            ],
            requester: {
                accountId: anotherAccountId,
                arn: anotherUserArn,
                parentArn: null,
                userType: 'User',
            },
            target: {
                accountId: defaultAccountId,
            },
            result: {
                result: 'Deny',
                checkAction: true,
            },
        },
        {
            name: 'should allow when external id matching',
            statement: [
                {
                    Principal: {
                        AWS: anotherAccountId,
                    },
                    Effect: 'Allow',
                    Condition: {
                        StringEquals: { 'sts:ExternalId': '4321' },
                    },
                },
            ],
            requester: {
                accountId: anotherAccountId,
                arn: anotherUserArn,
                parentArn: null,
                userType: 'User',
            },
            target: {
                accountId: defaultAccountId,
            },
            result: {
                result: 'Allow',
                checkAction: true,
            },
        },
    ].forEach(test => {
        it(`evaluatePrincipal(): ${test.name}`, () => {
            const rc = new RequestContext({}, {}, '', '', '127.0.0.1',
                false, 'assumeRole', 'sts', null, {
                    accountid: test.requester.accountId,
                    arn: test.requester.arn,
                    parentArn: test.requester.parentArn,
                    principalType: test.requester.userType,
                    externalId: '4321',
                }, 'v4', 'V4');

            const params = {
                log: defaultParams.log,
                trustedPolicy: {
                    Statement: test.statement,
                },
                rc,
                targetAccountId: test.target.accountId,
            };
            const result = Principal.evaluatePrincipal(params);
            assert.deepStrictEqual(result, test.result);
        });
    });
});
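A call sketch for the top-level entry point these cases drive; the request context carries the requester's identity, and the returned checkAction flag tells the caller whether the action still needs its own evaluation (the cross-account cases above), per the fixtures:

// Sketch: evaluating a trusted policy against a requester, as above.
const params = {
    log: defaultParams.log,
    trustedPolicy: {
        Statement: [{
            Principal: { AWS: 'arn:aws:iam::123456789012:user/test' },
            Effect: 'Allow',
        }],
    },
    rc, // a RequestContext built as in the test runner above
    targetAccountId: '123456789012',
};
const { result, checkAction } = Principal.evaluatePrincipal(params);
// result -> 'Allow' or 'Deny'; checkAction -> evaluate the action too?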
@@ -1078,6 +1078,36 @@ describe('policyEvaluator', () => {
                 check(requestContext, {}, policy, 'Allow');
             });

+            it('should allow policy arn if condition is met',
+                () => {
+                policy.Statement.Condition = {
+                    ArnLike: { 'iam:PolicyArn':
+                        ['arn:aws:iam::012345678901:policy/dev/*'] },
+                };
+                requestContext.setRequesterInfo(
+                    { accountid: '012345678901' });
+                const rcModifiers = {
+                    _policyArn:
+                        'arn:aws:iam::012345678901:policy/dev/devMachine1',
+                };
+                check(requestContext, rcModifiers, policy, 'Allow');
+            });
+
+            it('should not allow policy arn if condition is not met',
+                () => {
+                policy.Statement.Condition = {
+                    ArnLike: { 'iam:PolicyArn':
+                        ['arn:aws:iam::012345678901:policy/dev/*'] },
+                };
+                requestContext.setRequesterInfo(
+                    { accountid: '012345678901' });
+                const rcModifiers = {
+                    _policyArn:
+                        'arn:aws:iam::012345678901:policy/admin/deleteUser',
+                };
+                check(requestContext, rcModifiers, policy, 'Neutral');
+            });
+
             it('should allow access with multiple operator conditions ' +
                 'and multiple conditions under an operator',
                 () => {
@@ -1232,11 +1262,13 @@ describe('handleWildcards', () => {
         assert.deepStrictEqual(result, '^abc\\*abc\\?abc\\$$');
     });

+    /* eslint-disable no-useless-escape */
     it('should escape other regular expression special characters', () => {
         const result = handleWildcards('*^.+?()|[\]{}');
         assert.deepStrictEqual(result,
             '^.*?\\^\\.\\+.{1}\\(\\)\\|\\[\\\]\\{\\}$');
     });
+    /* eslint-enable */
 });

 describe('substituteVariables', () => {
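The expected strings above encode the translation rules: a policy `*` becomes the non-greedy regex `.*?`, a policy `?` becomes `.{1}`, every other regex metacharacter is backslash-escaped, and the whole pattern is anchored with `^...$`. A sketch of consuming the result:

// Sketch: matching a resource against a wildcarded policy value.
const pattern = handleWildcards('arn:aws:s3:::mybucket/*');
// pattern is a string like '^arn:aws:s3:::mybucket/.*?$'
const matches = new RegExp(pattern).test('arn:aws:s3:::mybucket/some/key');
// matches -> true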
@@ -0,0 +1,73 @@
const assert = require('assert');

const azureMpuUtils =
    require('../../../../lib/s3middleware/azureHelpers/mpuUtils');
const padString = azureMpuUtils.padString;
const getSubPartInfo = azureMpuUtils.getSubPartInfo;

const padStringTests = [
    {
        category: 'partNumber',
        strings: [1, 10, 100, 10000],
        expectedResults: ['00001', '00010', '00100', '10000'],
    }, {
        category: 'subPart',
        strings: [1, 50],
        expectedResults: ['01', '50'],
    }, {
        category: 'part',
        strings: ['test|'],
        expectedResults:
            ['test|%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'],
    },
];

const oneMb = 1024 * 1024;
const oneHundredMb = oneMb * 100;
const subPartInfoTests = [
    {
        desc: '100 mb',
        size: oneHundredMb,
        expectedNumberSubParts: 1,
        expectedLastPartSize: oneHundredMb,
    }, {
        desc: '101 mb',
        size: oneHundredMb + oneMb,
        expectedNumberSubParts: 2,
        expectedLastPartSize: oneMb,
    }, {
        desc: '599 mb',
        size: 6 * oneHundredMb - oneMb,
        expectedNumberSubParts: 6,
        expectedLastPartSize: oneHundredMb - oneMb,
    }, {
        desc: '600 mb',
        size: 6 * oneHundredMb,
        expectedNumberSubParts: 6,
        expectedLastPartSize: oneHundredMb,
    },
];

describe('s3middleware Azure MPU helper utility function', () => {
    padStringTests.forEach(test => {
        it(`padString should pad a ${test.category}`, done => {
            const result = test.strings.map(str =>
                padString(str, test.category));
            assert.deepStrictEqual(result, test.expectedResults);
            done();
        });
    });

    subPartInfoTests.forEach(test => {
        const { desc, size, expectedNumberSubParts, expectedLastPartSize }
            = test;
        it('getSubPartInfo should return correct result for ' +
            `dataContentLength of ${desc}`, done => {
            const result = getSubPartInfo(size);
            const expectedLastPartIndex = expectedNumberSubParts - 1;
            assert.strictEqual(result.lastPartIndex, expectedLastPartIndex);
            assert.strictEqual(result.lastPartSize, expectedLastPartSize);
            done();
        });
    });
});
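The fixtures encode the constraint this helper works around: one S3 part is split into sub-parts of at most 100 MB each, so the sub-part count is ceil(size / 100 MB) with the remainder landing in the last sub-part. A sketch, reusing the helpers required above:

// Sketch: sub-part layout for a 250 MB part (values follow the fixtures:
// 100 MB + 100 MB + 50 MB -> 3 sub-parts).
const info = getSubPartInfo(250 * 1024 * 1024);
// info.lastPartIndex -> 2, info.lastPartSize -> 50 MB
console.log(info.lastPartIndex, info.lastPartSize / (1024 * 1024));

// padString left-pads the numbering used in block IDs:
padString(7, 'partNumber'); // '00007'
padString(7, 'subPart');    // '07'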
@@ -0,0 +1,24 @@
const assert = require('assert');
const crypto = require('crypto');

const objectUtils =
    require('../../../lib/s3middleware/objectUtils');

const hexHash = 'd41d8cd98f00b204e9800998ecf8427e';
const base64Hash = '1B2M2Y8AsgTpgAmY7PhCfg==';

describe('s3middleware object utilities', () => {
    it('should convert hexadecimal MD5 to base 64', done => {
        const hash = crypto.createHash('md5').digest('hex');
        const convertedHash = objectUtils.getBase64MD5(hash);
        assert.strictEqual(convertedHash, base64Hash);
        done();
    });

    it('should convert base 64 MD5 to hexadecimal', done => {
        const hash = crypto.createHash('md5').digest('base64');
        const convertedHash = objectUtils.getHexMD5(hash);
        assert.strictEqual(convertedHash, hexHash);
        done();
    });
});
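Both helpers re-encode the same 16-byte digest, which matters because S3 reports ETags as hex MD5 while Azure reports Content-MD5 in base 64. A round-trip sketch, reusing the modules required above:

// Sketch: converting an MD5 digest between hex and base 64 encodings.
const hex = crypto.createHash('md5').update('hello').digest('hex');
const b64 = objectUtils.getBase64MD5(hex);
const roundTrip = objectUtils.getHexMD5(b64);
// roundTrip === hex, since both encode the same 16 bytes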