Compare commits

..

No commits in common. "bcabdeeadb3a1070691ab856aa2d4e96c710d895" and "3c54bd740fcc179d1ea500fbc230cc759e80cde9" have entirely different histories.

77 changed files with 915 additions and 3294 deletions

4
.gitignore vendored
View File

@ -1,5 +1 @@
# Logs
*.log
# Dependency directory
node_modules/ node_modules/

View File

@ -7,8 +7,6 @@ general:
machine: machine:
node: node:
version: 6.9.5 version: 6.9.5
services:
- redis
environment: environment:
CXX: g++-4.9 CXX: g++-4.9

View File

@ -29,7 +29,6 @@ module.exports = {
evaluators: require('./lib/policyEvaluator/evaluator.js'), evaluators: require('./lib/policyEvaluator/evaluator.js'),
validateUserPolicy: require('./lib/policy/policyValidator') validateUserPolicy: require('./lib/policy/policyValidator')
.validateUserPolicy, .validateUserPolicy,
evaluatePrincipal: require('./lib/policyEvaluator/principal'),
RequestContext: require('./lib/policyEvaluator/RequestContext.js'), RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
}, },
Clustering: require('./lib/Clustering'), Clustering: require('./lib/Clustering'),
@ -60,22 +59,12 @@ module.exports = {
}, },
s3middleware: { s3middleware: {
userMetadata: require('./lib/s3middleware/userMetadata'), userMetadata: require('./lib/s3middleware/userMetadata'),
convertToXml: require('./lib/s3middleware/convertToXml'),
escapeForXml: require('./lib/s3middleware/escapeForXml'), escapeForXml: require('./lib/s3middleware/escapeForXml'),
tagging: require('./lib/s3middleware/tagging'), tagging: require('./lib/s3middleware/tagging'),
validateConditionalHeaders: validateConditionalHeaders:
require('./lib/s3middleware/validateConditionalHeaders') require('./lib/s3middleware/validateConditionalHeaders')
.validateConditionalHeaders, .validateConditionalHeaders,
MD5Sum: require('./lib/s3middleware/MD5Sum'), MD5Sum: require('./lib/s3middleware/MD5Sum'),
objectUtils: require('./lib/s3middleware/objectUtils'),
azureHelper: {
mpuUtils:
require('./lib/s3middleware/azureHelpers/mpuUtils'),
ResultsCollector:
require('./lib/s3middleware/azureHelpers/ResultsCollector'),
SubStreamInterface:
require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
},
}, },
storage: { storage: {
metadata: { metadata: {
@ -94,17 +83,12 @@ module.exports = {
}, },
utils: require('./lib/storage/utils'), utils: require('./lib/storage/utils'),
}, },
models: { models: {
BucketInfo: require('./lib/models/BucketInfo'), BucketInfo: require('./lib/models/BucketInfo'),
ObjectMD: require('./lib/models/ObjectMD'), ObjectMD: require('./lib/models/ObjectMD'),
ObjectMDLocation: require('./lib/models/ObjectMDLocation'),
ARN: require('./lib/models/ARN'),
WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'), WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
ReplicationConfiguration: ReplicationConfiguration:
require('./lib/models/ReplicationConfiguration'), require('./lib/models/ReplicationConfiguration'),
}, },
metrics: {
StatsClient: require('./lib/metrics/StatsClient'),
RedisClient: require('./lib/metrics/RedisClient'),
},
}; };

View File

@ -49,14 +49,6 @@ class AuthInfo {
isRequesterPublicUser() { isRequesterPublicUser() {
return this.canonicalID === constants.publicId; return this.canonicalID === constants.publicId;
} }
isRequesterAServiceAccount() {
return this.canonicalID.startsWith(
`${constants.zenkoServiceAccount}/`);
}
isRequesterThisServiceAccount(serviceName) {
return this.canonicalID ===
`${constants.zenkoServiceAccount}/${serviceName}`;
}
} }
module.exports = AuthInfo; module.exports = AuthInfo;

View File

@ -12,7 +12,6 @@ const convertUTCtoISO8601 = require('./v4/timeUtils').convertUTCtoISO8601;
const vaultUtilities = require('./in_memory/vaultUtilities'); const vaultUtilities = require('./in_memory/vaultUtilities');
const backend = require('./in_memory/Backend'); const backend = require('./in_memory/Backend');
const validateAuthConfig = require('./in_memory/validateAuthConfig'); const validateAuthConfig = require('./in_memory/validateAuthConfig');
const AuthLoader = require('./in_memory/AuthLoader');
const Vault = require('./Vault'); const Vault = require('./Vault');
let vault = null; let vault = null;
@ -153,11 +152,10 @@ function doAuth(request, log, cb, awsService, requestContexts) {
* @param {string} accessKey - the accessKey * @param {string} accessKey - the accessKey
* @param {string} secretKeyValue - the secretKey * @param {string} secretKeyValue - the secretKey
* @param {string} awsService - Aws service related * @param {string} awsService - Aws service related
* @param {sting} [proxyPath] - path that gets proxied by reverse proxy
* @return {undefined} * @return {undefined}
*/ */
function generateV4Headers(request, data, accessKey, secretKeyValue, function generateV4Headers(request, data, accessKey, secretKeyValue,
awsService, proxyPath) { awsService) {
Object.assign(request, { headers: {} }); Object.assign(request, { headers: {} });
const amzDate = convertUTCtoISO8601(Date.now()); const amzDate = convertUTCtoISO8601(Date.now());
// get date without time // get date without time
@ -189,7 +187,7 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
).sort().join(';'); ).sort().join(';');
const params = { request, signedHeaders, payloadChecksum, const params = { request, signedHeaders, payloadChecksum,
credentialScope, timestamp, query: data, credentialScope, timestamp, query: data,
awsService: service, proxyPath }; awsService: service };
const stringToSign = constructStringToSignV4(params); const stringToSign = constructStringToSignV4(params);
const signingKey = vaultUtilities.calculateSigningKey(secretKeyValue, const signingKey = vaultUtilities.calculateSigningKey(secretKeyValue,
region, region,
@ -216,7 +214,6 @@ module.exports = {
inMemory: { inMemory: {
backend, backend,
validateAuthConfig, validateAuthConfig,
AuthLoader,
}, },
AuthInfo, AuthInfo,
Vault, Vault,

View File

@ -1,223 +0,0 @@
const fs = require('fs');
const glob = require('simple-glob');
const joi = require('joi');
const werelogs = require('werelogs');
const ARN = require('../../models/ARN');
/**
* Load authentication information from files or pre-loaded account
* objects
*
* @class AuthLoader
*/
class AuthLoader {
constructor(logApi) {
this._log = new (logApi || werelogs).Logger('S3');
this._authData = { accounts: [] };
// null: unknown validity, true/false: valid or invalid
this._isValid = null;
this._joiKeysValidator = joi.array()
.items({
access: joi.string().required(),
secret: joi.string().required(),
})
.required();
const accountsJoi = joi.array()
.items({
name: joi.string().required(),
email: joi.string().email().required(),
arn: joi.string().required(),
canonicalID: joi.string().required(),
shortid: joi.string().regex(/^[0-9]{12}$/).required(),
keys: this._joiKeysValidator,
// backward-compat
users: joi.array(),
})
.required()
.unique('arn')
.unique('email')
.unique('canonicalID');
this._joiValidator = joi.object({ accounts: accountsJoi });
}
/**
* add one or more accounts to the authentication info
*
* @param {object} authData - authentication data
* @param {object[]} authData.accounts - array of account data
* @param {string} authData.accounts[].name - account name
* @param {string} authData.accounts[].email: email address
* @param {string} authData.accounts[].arn: account ARN,
* e.g. 'arn:aws:iam::123456789012:root'
* @param {string} authData.accounts[].canonicalID account
* canonical ID
* @param {string} authData.accounts[].shortid account ID number,
* e.g. '123456789012'
* @param {object[]} authData.accounts[].keys array of
* access/secret keys
* @param {object[]} authData.accounts[].keys[].access access key
* @param {object[]} authData.accounts[].keys[].secret secret key
* @param {string} [filePath] - optional file path info for
* logging purpose
* @return {undefined}
*/
addAccounts(authData, filePath) {
const isValid = this._validateData(authData, filePath);
if (isValid) {
this._authData.accounts =
this._authData.accounts.concat(authData.accounts);
// defer validity checking when getting data to avoid
// logging multiple times the errors (we need to validate
// all accounts at once to detect duplicate values)
if (this._isValid) {
this._isValid = null;
}
} else {
this._isValid = false;
}
}
/**
* add account information from a file
*
* @param {string} filePath - file path containing JSON
* authentication info (see {@link addAccounts()} for format)
* @return {undefined}
*/
addFile(filePath) {
const authData = JSON.parse(fs.readFileSync(filePath));
this.addAccounts(authData, filePath);
}
/**
* add account information from a filesystem path
*
* @param {string|string[]} globPattern - filesystem glob pattern,
* can be a single string or an array of glob patterns. Globs
* can be simple file paths or can contain glob matching
* characters, like '/a/b/*.json'. The matching files are
* individually loaded as JSON and accounts are added. See
* {@link addAccounts()} for JSON format.
* @return {undefined}
*/
addFilesByGlob(globPattern) {
const files = glob(globPattern);
files.forEach(filePath => this.addFile(filePath));
}
/**
* perform validation on authentication info previously
* loaded. Note that it has to be done on the entire set after an
* update to catch duplicate account IDs or access keys.
*
* @return {boolean} true if authentication info is valid
* false otherwise
*/
validate() {
if (this._isValid === null) {
this._isValid = this._validateData(this._authData);
}
return this._isValid;
}
/**
* get authentication info as a plain JS object containing all accounts
* under the "accounts" attribute, with validation.
*
* @return {object|null} the validated authentication data
* null if invalid
*/
getData() {
return this.validate() ? this._authData : null;
}
_validateData(authData, filePath) {
const res = joi.validate(authData, this._joiValidator,
{ abortEarly: false });
if (res.error) {
this._dumpJoiErrors(res.error.details, filePath);
return false;
}
let allKeys = [];
let arnError = false;
const validatedAuth = res.value;
validatedAuth.accounts.forEach(account => {
// backward-compat: ignore arn if starts with 'aws:' and log a
// warning
if (account.arn.startsWith('aws:')) {
this._log.error(
'account must have a valid AWS ARN, legacy examples ' +
'starting with \'aws:\' are not supported anymore. ' +
'Please convert to a proper account entry (see ' +
'examples at https://github.com/scality/S3/blob/' +
'master/conf/authdata.json). Also note that support ' +
'for account users has been dropped.',
{ accountName: account.name, accountArn: account.arn,
filePath });
arnError = true;
return;
}
if (account.users) {
this._log.error(
'support for account users has been dropped, consider ' +
'turning users into account entries (see examples at ' +
'https://github.com/scality/S3/blob/master/conf/' +
'authdata.json)',
{ accountName: account.name, accountArn: account.arn,
filePath });
arnError = true;
return;
}
const arnObj = ARN.createFromString(account.arn);
if (arnObj.error) {
this._log.error(
'authentication config validation error',
{ reason: arnObj.error.description,
accountName: account.name, accountArn: account.arn,
filePath });
arnError = true;
return;
}
if (!arnObj.isIAMAccount()) {
this._log.error(
'authentication config validation error',
{ reason: 'not an IAM account ARN',
accountName: account.name, accountArn: account.arn,
filePath });
arnError = true;
return;
}
allKeys = allKeys.concat(account.keys);
});
if (arnError) {
return false;
}
const uniqueKeysRes = joi.validate(
allKeys, this._joiKeysValidator.unique('access'));
if (uniqueKeysRes.error) {
this._dumpJoiErrors(uniqueKeysRes.error.details, filePath);
return false;
}
return true;
}
_dumpJoiErrors(errors, filePath) {
errors.forEach(err => {
const logInfo = { item: err.path, filePath };
if (err.type === 'array.unique') {
logInfo.reason = `duplicate value '${err.context.path}'`;
logInfo.dupValue = err.context.value[err.context.path];
} else {
logInfo.reason = err.message;
logInfo.context = err.context;
}
this._log.error('authentication config validation error',
logInfo);
});
}
}
module.exports = AuthLoader;

View File

@ -7,6 +7,10 @@ const calculateSigningKey = require('./vaultUtilities').calculateSigningKey;
const hashSignature = require('./vaultUtilities').hashSignature; const hashSignature = require('./vaultUtilities').hashSignature;
const Indexer = require('./Indexer'); const Indexer = require('./Indexer');
function _buildArn(service, generalResource, specificResource) {
return `arn:aws:${service}:::${generalResource}/${specificResource}`;
}
function _formatResponse(userInfoToSend) { function _formatResponse(userInfoToSend) {
return { return {
message: { message: {
@ -38,7 +42,7 @@ class Backend {
/** verifySignatureV2 /** verifySignatureV2
* @param {string} stringToSign - string to sign built per AWS rules * @param {string} stringToSign - string to sign built per AWS rules
* @param {string} signatureFromRequest - signature sent with request * @param {string} signatureFromRequest - signature sent with request
* @param {string} accessKey - account accessKey * @param {string} accessKey - user's accessKey
* @param {object} options - contains algorithm (SHA1 or SHA256) * @param {object} options - contains algorithm (SHA1 or SHA256)
* @param {function} callback - callback with either error or user info * @param {function} callback - callback with either error or user info
* @return {function} calls callback * @return {function} calls callback
@ -69,7 +73,7 @@ class Backend {
/** verifySignatureV4 /** verifySignatureV4
* @param {string} stringToSign - string to sign built per AWS rules * @param {string} stringToSign - string to sign built per AWS rules
* @param {string} signatureFromRequest - signature sent with request * @param {string} signatureFromRequest - signature sent with request
* @param {string} accessKey - account accessKey * @param {string} accessKey - user's accessKey
* @param {string} region - region specified in request credential * @param {string} region - region specified in request credential
* @param {string} scopeDate - date specified in request credential * @param {string} scopeDate - date specified in request credential
* @param {object} options - options to send to Vault * @param {object} options - options to send to Vault
@ -157,6 +161,55 @@ class Backend {
}; };
return cb(null, vaultReturnObject); return cb(null, vaultReturnObject);
} }
/**
* Mocks Vault's response to a policy evaluation request
* Since policies not actually implemented in memory backend,
* we allow users to proceed with request.
* @param {object} requestContextParams - parameters needed to construct
* requestContext in Vault
* @param {object} requestContextParams.constantParams -
* params that have the
* same value for each requestContext to be constructed in Vault
* @param {object} requestContextParams.paramaterize - params that have
* arrays as values since a requestContext needs to be constructed with
* each option in Vault
* @param {object[]} requestContextParams.paramaterize.specificResource -
* specific resources paramaterized as an array of objects containing
* properties `key` and optional `versionId`
* @param {string} userArn - arn of requesting user
* @param {object} log - log object
* @param {function} cb - callback with either error or an array
* of authorization results
* @returns {undefined}
* @callback called with (err, vaultReturnObject)
*/
checkPolicies(requestContextParams, userArn, log, cb) {
let results;
const parameterizeParams = requestContextParams.parameterize;
if (parameterizeParams && parameterizeParams.specificResource) {
// object is parameterized
results = parameterizeParams.specificResource.map(obj => ({
isAllowed: true,
arn: _buildArn(this.service, requestContextParams
.constantParams.generalResource, obj.key),
versionId: obj.versionId,
}));
} else {
results = [{
isAllowed: true,
arn: _buildArn(this.service, requestContextParams
.constantParams.generalResource, requestContextParams
.constantParams.specificResource),
}];
}
const vaultReturnObject = {
message: {
body: results,
},
};
return cb(null, vaultReturnObject);
}
} }
@ -173,6 +226,9 @@ class S3AuthBackend extends Backend {
* @param {object[]=} authdata.accounts[].keys - array of key objects * @param {object[]=} authdata.accounts[].keys - array of key objects
* @param {string} authdata.accounts[].keys[].access - access key * @param {string} authdata.accounts[].keys[].access - access key
* @param {string} authdata.accounts[].keys[].secret - secret key * @param {string} authdata.accounts[].keys[].secret - secret key
* @param {object[]=} authdata.accounts[].users - array of user objects:
* note, same properties as account except no canonical ID / sas token
* @param {string=} authdata.accounts[].sasToken - Azure SAS token
* @return {undefined} * @return {undefined}
*/ */
constructor(authdata) { constructor(authdata) {

View File

@ -19,6 +19,9 @@ class Indexer {
* @param {object[]=} authdata.accounts[].keys - array of key objects * @param {object[]=} authdata.accounts[].keys - array of key objects
* @param {string} authdata.accounts[].keys[].access - access key * @param {string} authdata.accounts[].keys[].access - access key
* @param {string} authdata.accounts[].keys[].secret - secret key * @param {string} authdata.accounts[].keys[].secret - secret key
* @param {object[]=} authdata.accounts[].users - array of user objects:
* note, same properties as account except no canonical ID / sas token
* @param {string=} authdata.accounts[].sasToken - Azure SAS token
* @return {undefined} * @return {undefined}
*/ */
constructor(authdata) { constructor(authdata) {
@ -27,6 +30,10 @@ class Indexer {
accessKey: {}, accessKey: {},
email: {}, email: {},
}; };
this.usersBy = {
accessKey: {},
email: {},
};
/* /*
* This may happen if the application is configured to use another * This may happen if the application is configured to use another
@ -40,6 +47,23 @@ class Indexer {
this._build(authdata); this._build(authdata);
} }
_indexUser(account, user) {
const userData = {
arn: account.arn,
canonicalID: account.canonicalID,
shortid: account.shortid,
accountDisplayName: account.accountDisplayName,
IAMdisplayName: user.name,
email: user.email.toLowerCase(),
keys: [],
};
this.usersBy.email[userData.email] = userData;
user.keys.forEach(key => {
userData.keys.push(key);
this.usersBy.accessKey[key.access] = userData;
});
}
_indexAccount(account) { _indexAccount(account) {
const accountData = { const accountData = {
arn: account.arn, arn: account.arn,
@ -57,6 +81,11 @@ class Indexer {
this.accountsBy.accessKey[key.access] = accountData; this.accountsBy.accessKey[key.access] = accountData;
}); });
} }
if (account.users !== undefined) {
account.users.forEach(user => {
this._indexUser(accountData, user);
});
}
} }
_build(authdata) { _build(authdata) {
@ -97,8 +126,11 @@ class Indexer {
* @return {Object} entity.email - The entity's lowercased email * @return {Object} entity.email - The entity's lowercased email
*/ */
getEntityByKey(key) { getEntityByKey(key) {
if (this.accountsBy.accessKey.hasOwnProperty(key)) {
return this.accountsBy.accessKey[key]; return this.accountsBy.accessKey[key];
} }
return this.usersBy.accessKey[key];
}
/** /**
* This method returns the entity (either an account or a user) associated * This method returns the entity (either an account or a user) associated
@ -118,6 +150,9 @@ class Indexer {
*/ */
getEntityByEmail(email) { getEntityByEmail(email) {
const lowerCasedEmail = email.toLowerCase(); const lowerCasedEmail = email.toLowerCase();
if (this.usersBy.email.hasOwnProperty(lowerCasedEmail)) {
return this.usersBy.email[lowerCasedEmail];
}
return this.accountsBy.email[lowerCasedEmail]; return this.accountsBy.email[lowerCasedEmail];
} }

View File

@ -1,18 +1,194 @@
const AuthLoader = require('./AuthLoader'); const werelogs = require('werelogs');
function _incr(count) {
if (count !== undefined) {
return count + 1;
}
return 1;
}
/** /**
* @deprecated please use {@link AuthLoader} class instead * This function ensures that the field `name` inside `container` is of the
* expected `type` inside `obj`. If any error is found, an entry is added into
* the error collector object.
* *
* @param {object} data - the error collector object
* @param {string} container - the name of the entity that contains
* what we're checking
* @param {string} name - the name of the entity we're checking for
* @param {string} type - expected typename of the entity we're checking
* @param {object} obj - the object we're checking the fields of
* @return {boolean} true if the type is Ok and no error found
* false if an error was found and reported
*/
function _checkType(data, container, name, type, obj) {
if ((type === 'array' && !Array.isArray(obj[name]))
|| (type !== 'array' && typeof obj[name] !== type)) {
data.errors.push({
txt: 'property is not of the expected type',
obj: {
entity: container,
property: name,
type: typeof obj[name],
expectedType: type,
},
});
return false;
}
return true;
}
/**
* This function ensures that the field `name` inside `obj` which is a
* `container`. If any error is found, an entry is added into the error
* collector object.
*
* @param {object} data - the error collector object
* @param {string} container - the name of the entity that contains
* what we're checking
* @param {string} name - the name of the entity we're checking for
* @param {string} type - expected typename of the entity we're checking
* @param {object} obj - the object we're checking the fields of
* @return {boolean} true if the field exists and type is Ok
* false if an error was found and reported
*/
function _checkExists(data, container, name, type, obj) {
if (obj[name] === undefined) {
data.errors.push({
txt: 'missing property in auth entity',
obj: {
entity: container,
property: name,
},
});
return false;
}
return _checkType(data, container, name, type, obj);
}
function _checkUser(data, userObj) {
if (_checkExists(data, 'User', 'arn', 'string', userObj)) {
// eslint-disable-next-line no-param-reassign
data.arns[userObj.arn] = _incr(data.arns[userObj.arn]);
}
if (_checkExists(data, 'User', 'email', 'string', userObj)) {
// eslint-disable-next-line no-param-reassign
data.emails[userObj.email] = _incr(data.emails[userObj.email]);
}
if (_checkExists(data, 'User', 'keys', 'array', userObj)) {
userObj.keys.forEach(keyObj => {
// eslint-disable-next-line no-param-reassign
data.keys[keyObj.access] = _incr(data.keys[keyObj.access]);
});
}
}
function _checkAccount(data, accountObj, checkSas) {
if (_checkExists(data, 'Account', 'email', 'string', accountObj)) {
// eslint-disable-next-line no-param-reassign
data.emails[accountObj.email] = _incr(data.emails[accountObj.email]);
}
if (_checkExists(data, 'Account', 'arn', 'string', accountObj)) {
// eslint-disable-next-line no-param-reassign
data.arns[accountObj.arn] = _incr(data.arns[accountObj.arn]);
}
if (_checkExists(data, 'Account', 'canonicalID', 'string', accountObj)) {
// eslint-disable-next-line no-param-reassign
data.canonicalIds[accountObj.canonicalID] =
_incr(data.canonicalIds[accountObj.canonicalID]);
}
if (checkSas &&
_checkExists(data, 'Account', 'sasToken', 'string', accountObj)) {
// eslint-disable-next-line no-param-reassign
data.sasTokens[accountObj.sasToken] =
_incr(data.sasTokens[accountObj.sasToken]);
}
if (accountObj.users) {
if (_checkType(data, 'Account', 'users', 'array', accountObj)) {
accountObj.users.forEach(userObj => _checkUser(data, userObj));
}
}
if (accountObj.keys) {
if (_checkType(data, 'Account', 'keys', 'array', accountObj)) {
accountObj.keys.forEach(keyObj => {
// eslint-disable-next-line no-param-reassign
data.keys[keyObj.access] = _incr(data.keys[keyObj.access]);
});
}
}
}
function _dumpCountError(property, obj, log) {
let count = 0;
Object.keys(obj).forEach(key => {
if (obj[key] > 1) {
log.error('property should be unique', {
property,
value: key,
count: obj[key],
});
++count;
}
});
return count;
}
function _dumpErrors(checkData, log) {
let nerr = _dumpCountError('CanonicalID', checkData.canonicalIds, log);
nerr += _dumpCountError('Email', checkData.emails, log);
nerr += _dumpCountError('ARN', checkData.arns, log);
nerr += _dumpCountError('AccessKey', checkData.keys, log);
nerr += _dumpCountError('SAS Token', checkData.sasTokens, log);
if (checkData.errors.length > 0) {
checkData.errors.forEach(msg => {
log.error(msg.txt, msg.obj);
});
}
if (checkData.errors.length === 0 && nerr === 0) {
return false;
}
log.fatal('invalid authentication config file (cannot start)');
return true;
}
/**
* @param {object} authdata - the authentication config file's data * @param {object} authdata - the authentication config file's data
* @param {werelogs.API} logApi - object providing a constructor function * @param {werelogs.API} logApi - object providing a constructor function
* for the Logger object * for the Logger object
* @param {(boolean|null)} checkSas - whether to check Azure SAS for ea. account
* @return {boolean} true on erroneous data * @return {boolean} true on erroneous data
* false on success * false on success
*/ */
function validateAuthConfig(authdata, logApi) { function validateAuthConfig(authdata, logApi, checkSas) {
const authLoader = new AuthLoader(logApi); const checkData = {
authLoader.addAccounts(authdata); errors: [],
return !authLoader.validate(); emails: [],
arns: [],
canonicalIds: [],
keys: [],
sasTokens: [],
};
const log = new (logApi || werelogs).Logger('S3');
if (authdata.accounts === undefined) {
checkData.errors.push({
txt: 'no "accounts" array defined in Auth config',
});
return _dumpErrors(checkData, log);
}
authdata.accounts.forEach(account => {
_checkAccount(checkData, account, checkSas);
});
return _dumpErrors(checkData, log);
} }
module.exports = validateAuthConfig; module.exports = validateAuthConfig;

View File

@ -27,7 +27,7 @@ function check(request, log, data) {
milliseconds to compare to Date.now() milliseconds to compare to Date.now()
*/ */
const expirationTime = parseInt(data.Expires, 10) * 1000; const expirationTime = parseInt(data.Expires, 10) * 1000;
if (Number.isNaN(expirationTime)) { if (isNaN(expirationTime)) {
log.debug('invalid expires parameter', log.debug('invalid expires parameter',
{ expires: data.Expires }); { expires: data.Expires });
return { err: errors.MissingSecurityHeader }; return { err: errors.MissingSecurityHeader };

View File

@ -10,13 +10,17 @@ const createCanonicalRequest = require('./createCanonicalRequest');
* @returns {string} - stringToSign * @returns {string} - stringToSign
*/ */
function constructStringToSign(params) { function constructStringToSign(params) {
const { request, signedHeaders, payloadChecksum, credentialScope, timestamp, const request = params.request;
query, log, proxyPath } = params; const signedHeaders = params.signedHeaders;
const path = proxyPath || request.path; const payloadChecksum = params.payloadChecksum;
const credentialScope = params.credentialScope;
const timestamp = params.timestamp;
const query = params.query;
const log = params.log;
const canonicalReqResult = createCanonicalRequest({ const canonicalReqResult = createCanonicalRequest({
pHttpVerb: request.method, pHttpVerb: request.method,
pResource: path, pResource: request.path,
pQuery: query, pQuery: query,
pHeaders: request.headers, pHeaders: request.headers,
pSignedHeaders: signedHeaders, pSignedHeaders: signedHeaders,

View File

@ -40,8 +40,7 @@ function createCanonicalRequest(params) {
// canonical query string // canonical query string
let canonicalQueryStr = ''; let canonicalQueryStr = '';
if (pQuery && !((service === 'iam' || service === 'ring' || if (pQuery && !((service === 'iam' || service === 'ring') &&
service === 'sts') &&
pHttpVerb === 'POST')) { pHttpVerb === 'POST')) {
const sortedQueryParams = Object.keys(pQuery).sort().map(key => { const sortedQueryParams = Object.keys(pQuery).sort().map(key => {
const encodedKey = awsURIencode(key); const encodedKey = awsURIencode(key);

View File

@ -41,9 +41,8 @@ function validateCredentials(credentials, timestamp, log) {
{ scopeDate, timestampDate }); { scopeDate, timestampDate });
return errors.RequestTimeTooSkewed; return errors.RequestTimeTooSkewed;
} }
if (service !== 's3' && service !== 'iam' && service !== 'ring' && if (service !== 's3' && service !== 'iam' && service !== 'ring') {
service !== 'sts') { log.warn('service in credentials is not one of s3/iam/ring', {
log.warn('service in credentials is not one of s3/iam/ring/sts', {
service, service,
}); });
return errors.InvalidArgument; return errors.InvalidArgument;

View File

@ -20,7 +20,6 @@ module.exports = {
// no authentication information. Requestor can access // no authentication information. Requestor can access
// only public resources // only public resources
publicId: 'http://acs.amazonaws.com/groups/global/AllUsers', publicId: 'http://acs.amazonaws.com/groups/global/AllUsers',
zenkoServiceAccount: 'http://acs.zenko.io/accounts/service',
metadataFileNamespace: '/MDFile', metadataFileNamespace: '/MDFile',
dataFileURL: '/DataFile', dataFileURL: '/DataFile',
// AWS states max size for user-defined metadata // AWS states max size for user-defined metadata
@ -30,41 +29,4 @@ module.exports = {
// so we do the same. // so we do the same.
maximumMetaHeadersSize: 2136, maximumMetaHeadersSize: 2136,
emptyFileMd5: 'd41d8cd98f00b204e9800998ecf8427e', emptyFileMd5: 'd41d8cd98f00b204e9800998ecf8427e',
// Version 2 changes the format of the data location property
// Version 3 adds the dataStoreName attribute
mdModelVersion: 3,
/*
* Splitter is used to build the object name for the overview of a
* multipart upload and to build the object names for each part of a
* multipart upload. These objects with large names are then stored in
* metadata in a "shadow bucket" to a real bucket. The shadow bucket
* contains all ongoing multipart uploads. We include in the object
* name some of the info we might need to pull about an open multipart
* upload or about an individual part with each piece of info separated
* by the splitter. We can then extract each piece of info by splitting
* the object name string with this splitter.
* For instance, assuming a splitter of '...!*!',
* the name of the upload overview would be:
* overview...!*!objectKey...!*!uploadId
* For instance, the name of a part would be:
* uploadId...!*!partNumber
*
* The sequence of characters used in the splitter should not occur
* elsewhere in the pieces of info to avoid splitting where not
* intended.
*
* Splitter is also used in adding bucketnames to the
* namespacerusersbucket. The object names added to the
* namespaceusersbucket are of the form:
* canonicalID...!*!bucketname
*/
splitter: '..|..',
usersBucket: 'users..bucket',
// MPU Bucket Prefix is used to create the name of the shadow
// bucket used for multipart uploads. There is one shadow mpu
// bucket per bucket and its name is the mpuBucketPrefix followed
// by the name of the final destination bucket for the object
// once the multipart upload is complete.
mpuBucketPrefix: 'mpuShadowBucket',
}; };

View File

@ -1,54 +0,0 @@
const Redis = require('ioredis');
class RedisClient {
/**
* @constructor
* @param {Object} config - config
* @param {string} config.host - Redis host
* @param {number} config.port - Redis port
* @param {string} config.password - Redis password
* @param {werelogs.Logger} logger - logger instance
*/
constructor(config, logger) {
this._client = new Redis(config);
this._client.on('error', err =>
logger.trace('error from redis', {
error: err,
method: 'RedisClient.constructor',
redisHost: config.host,
redisPort: config.port,
})
);
return this;
}
/**
* increment value of a key by 1 and set a ttl
* @param {string} key - key holding the value
* @param {number} expiry - expiry in seconds
* @param {callback} cb - callback
* @return {undefined}
*/
incrEx(key, expiry, cb) {
return this._client
.multi([['incr', key], ['expire', key, expiry]])
.exec(cb);
}
/**
* execute a batch of commands
* @param {string[]} cmds - list of commands
* @param {callback} cb - callback
* @return {undefined}
*/
batch(cmds, cb) {
return this._client.pipeline(cmds).exec(cb);
}
clear(cb) {
return this._client.flushdb(cb);
}
}
module.exports = RedisClient;

View File

@ -1,150 +0,0 @@
const async = require('async');
class StatsClient {
    /**
     * Tracks request/error counters in Redis, bucketed by a fixed
     * sampling interval, and aggregates them over a sliding window.
     *
     * @constructor
     * @param {object} redisClient - RedisClient instance
     * @param {number} interval - sampling interval in seconds
     * @param {number} expiry - sampling duration in seconds
     */
    constructor(redisClient, interval, expiry) {
        this._redis = redisClient;
        this._interval = interval;
        this._expiry = expiry;
        return this;
    }

    /**
     * Placeholder callback used when the caller did not supply one.
     * @return {undefined}
     */
    _noop() {}

    /**
     * Round a date down to the start of its sampling interval.
     * Note: this mutates `d` in place, which getStats relies on when
     * stepping backwards through intervals.
     *
     * @param {object} d - Date instance
     * @return {number} timestamp normalized to the nearest interval
     */
    _normalizeTimestamp(d) {
        const seconds = d.getSeconds();
        return d.setSeconds(seconds - seconds % this._interval, 0);
    }

    /**
     * Step a date back by one sampling interval (mutates `d`).
     *
     * @param {object} d - Date instance
     * @return {number} timestamp set to the previous interval
     */
    _setPrevInterval(d) {
        return d.setSeconds(d.getSeconds() - this._interval);
    }

    /**
     * Build the Redis key for a counter at the interval containing `d`.
     *
     * @param {string} name - key name identifier
     * @param {object} d - Date instance
     * @return {string} key for redis
     */
    _buildKey(name, d) {
        return `${name}:${this._normalizeTimestamp(d)}`;
    }

    /**
     * Sum a batch result into a single count. Typical input looks like
     * [[null, '1'], [null, '2'], [null, null], ...] where index 1 of
     * each pair is the stored value (may be null/unparseable).
     *
     * @param {array} arr - batch result pairs of [error, value]
     * @return {number} total of all parseable values
     */
    _getCount(arr) {
        let total = 0;
        for (const [, value] of arr) {
            const parsed = parseInt(value, 10);
            total += Number.isNaN(parsed) ? 0 : parsed;
        }
        return total;
    }

    /**
     * Record a new request received on the server.
     *
     * @param {string} id - service identifier
     * @param {callback} cb - callback
     * @return {undefined}
     */
    reportNewRequest(id, cb) {
        if (!this._redis) {
            return undefined;
        }
        const done = cb || this._noop;
        const key = this._buildKey(`${id}:requests`, new Date());
        return this._redis.incrEx(key, this._expiry, done);
    }

    /**
     * Record a request that ended up being a 500 on the server.
     *
     * @param {string} id - service identifier
     * @param {callback} cb - callback
     * @return {undefined}
     */
    report500(id, cb) {
        if (!this._redis) {
            return undefined;
        }
        const done = cb || this._noop;
        const key = this._buildKey(`${id}:500s`, new Date());
        return this._redis.incrEx(key, this._expiry, done);
    }

    /**
     * Get stats for the last x seconds, x being the sampling duration.
     *
     * @param {object} log - Werelogs request logger
     * @param {string} id - service identifier
     * @param {callback} cb - callback to call with the err/result
     * @return {undefined}
     */
    getStats(log, id, cb) {
        if (!this._redis) {
            return cb(null, {});
        }
        const cursor = new Date();
        const sampleCount = Math.floor(this._expiry / this._interval);
        const requestCmds = [];
        const errorCmds = [];
        let remaining = sampleCount;
        while (remaining > 0) {
            requestCmds.push(['get', this._buildKey(`${id}:requests`, cursor)]);
            errorCmds.push(['get', this._buildKey(`${id}:500s`, cursor)]);
            this._setPrevInterval(cursor);
            remaining -= 1;
        }
        return async.parallel([
            next => this._redis.batch(requestCmds, next),
            next => this._redis.batch(errorCmds, next),
        ], (err, results) => {
            // Each batch result has the shape [[null, '1'], [null, '2'], ...]
            // where index 0 of each item is the per-command error and
            // index 1 is the stored value.
            const statsRes = {
                'requests': 0,
                '500s': 0,
                'sampleDuration': this._expiry,
            };
            if (err) {
                log.error('error getting stats', {
                    error: err,
                    method: 'StatsClient.getStats',
                });
                // Stats collection is best-effort: swallow the redis
                // error rather than returning an InternalError that
                // could be mistaken for a service health problem.
                return cb(null, statsRes);
            }
            statsRes.requests = this._getCount(results[0]);
            statsRes['500s'] = this._getCount(results[1]);
            return cb(null, statsRes);
        });
    }
}
module.exports = StatsClient;

View File

@ -1,106 +0,0 @@
const errors = require('../errors');
const validServices = {
    aws: ['s3', 'iam', 'sts', 'ring'],
    scality: ['utapi', 'sso'],
};

class ARN {
    /**
     * Create an ARN object from its individual components
     *
     * @constructor
     * @param {string} partition - ARN partition (e.g. 'aws')
     * @param {string} service - service name in partition (e.g. 's3')
     * @param {string} [region] - AWS region
     * @param {string} [accountId] - AWS 12-digit account ID
     * @param {string} resource - AWS resource path (e.g. 'foo/bar')
     */
    constructor(partition, service, region, accountId, resource) {
        this._partition = partition;
        this._service = service;
        this._region = region || null;
        this._accountId = accountId || null;
        this._resource = resource;
    }

    /**
     * Parse a string of the form
     * "arn:<partition>:<service>:<region>:<accountId>:<resource>"
     * into an ARN instance, validating each component.
     *
     * @param {string} arnStr - ARN string to parse
     * @return {ARN|object} a new ARN instance on success, or an object
     *   with an 'error' property on validation failure
     */
    static createFromString(arnStr) {
        const invalid = msg =>
            ({ error: errors.InvalidArgument.customizeDescription(msg) });
        const [arn, partition, service, region, accountId,
            resourceType, resource] = arnStr.split(':');
        if (arn !== 'arn') {
            return invalid('bad ARN: must start with "arn:"');
        }
        if (!partition) {
            return invalid('bad ARN: must include a partition name, ' +
                'like "aws" in "arn:aws:..."');
        }
        if (!service) {
            return invalid('bad ARN: must include a service name, ' +
                'like "s3" in "arn:aws:s3:..."');
        }
        if (validServices[partition] === undefined) {
            return invalid(`bad ARN: unknown partition "${partition}", ` +
                'should be a valid partition name like "aws" in ' +
                '"arn:aws:..."');
        }
        if (!validServices[partition].includes(service)) {
            return invalid(
                `bad ARN: unsupported ${partition} service "${service}"`);
        }
        if (accountId && !/^([0-9]{12}|[*])$/.test(accountId)) {
            return invalid(`bad ARN: bad account ID "${accountId}": ` +
                'must be a 12-digit number or "*"');
        }
        // Re-join the resource portion, which may itself contain a ':'
        // separator between a resource type and its path.
        const fullResource = resource === undefined ?
            resourceType : `${resourceType}:${resource}`;
        return new ARN(partition, service, region, accountId, fullResource);
    }

    getPartition() {
        return this._partition;
    }

    getService() {
        return this._service;
    }

    getRegion() {
        return this._region;
    }

    getAccountId() {
        return this._accountId;
    }

    getResource() {
        return this._resource;
    }

    /** @return {boolean} true if this ARN denotes an IAM account root */
    isIAMAccount() {
        const accountId = this.getAccountId();
        return this.getService() === 'iam'
            && accountId !== null
            && accountId !== '*'
            && this.getResource() === 'root';
    }

    /** @return {boolean} true if this ARN denotes an IAM user */
    isIAMUser() {
        const accountId = this.getAccountId();
        return this.getService() === 'iam'
            && accountId !== null
            && accountId !== '*'
            && this.getResource().startsWith('user/');
    }

    /** @return {boolean} true if this ARN denotes an IAM role */
    isIAMRole() {
        return this.getService() === 'iam'
            && this.getAccountId() !== null
            && this.getResource().startsWith('role');
    }

    /** @return {string} canonical colon-separated ARN string */
    toString() {
        const components = ['arn', this.getPartition(), this.getService(),
            this.getRegion(), this.getAccountId(), this.getResource()];
        return components.join(':');
    }
}
module.exports = ARN;

View File

@ -1,66 +1,23 @@
const constants = require('../constants');
const VersionIDUtils = require('../versioning/VersionID');
const ObjectMDLocation = require('./ObjectMDLocation'); // Version 2 changes the format of the data location property
// Version 3 adds the dataStoreName attribute
const modelVersion = 3;
/** /**
* Class to manage metadata object for regular s3 objects (instead of * Class to manage metadata object for regular s3 objects (instead of
* mpuPart metadata for example) * mpuPart metadata for example)
*/ */
class ObjectMD { module.exports = class ObjectMD {
/** /**
* Create a new instance of ObjectMD. Parameter <tt>objMd</tt> is
* reserved for internal use, users should call
* {@link ObjectMD.createFromBlob()} to load from a stored
* metadata blob and check the returned value for errors.
*
* @constructor * @constructor
* @param {ObjectMD|object} [objMd] - object metadata source,
* either an ObjectMD instance or a native JS object parsed from
* JSON
*/
constructor(objMd = undefined) {
this._initMd();
if (objMd !== undefined) {
if (objMd instanceof ObjectMD) {
this._updateFromObjectMD(objMd);
} else {
this._updateFromParsedJSON(objMd);
}
} else {
// set newly-created object md modified time to current time
this._data['last-modified'] = new Date().toJSON();
}
// set latest md model version now that we ensured
// backward-compat conversion
this._data['md-model-version'] = constants.mdModelVersion;
}
/**
* create an ObjectMD instance from stored metadata
* *
* @param {String|Buffer} storedBlob - serialized metadata blob * @param {number} version - Version of the metadata model
* @return {object} a result object containing either a 'result'
* property which value is a new ObjectMD instance on success, or
* an 'error' property on error
*/ */
static createFromBlob(storedBlob) { constructor(version) {
try { const now = new Date().toJSON();
const objMd = JSON.parse(storedBlob);
return { result: new ObjectMD(objMd) };
} catch (err) {
return { error: err };
}
}
getSerialized() {
return JSON.stringify(this.getValue());
}
_initMd() {
// initialize md with default values
this._data = { this._data = {
'md-model-version': version || modelVersion,
'owner-display-name': '', 'owner-display-name': '',
'owner-id': '', 'owner-id': '',
'cache-control': '', 'cache-control': '',
@ -69,6 +26,7 @@ class ObjectMD {
'expires': '', 'expires': '',
'content-length': 0, 'content-length': 0,
'content-type': '', 'content-type': '',
'last-modified': now,
'content-md5': '', 'content-md5': '',
// simple/no version. will expand once object versioning is // simple/no version. will expand once object versioning is
// introduced // introduced
@ -90,7 +48,7 @@ class ObjectMD {
READ_ACP: [], READ_ACP: [],
}, },
'key': '', 'key': '',
'location': null, 'location': [],
'isNull': '', 'isNull': '',
'nullVersionId': '', 'nullVersionId': '',
'isDeleteMarker': '', 'isDeleteMarker': '',
@ -102,37 +60,18 @@ class ObjectMD {
destination: '', destination: '',
storageClass: '', storageClass: '',
role: '', role: '',
storageType: '',
dataStoreVersionId: '',
}, },
'dataStoreName': '', 'dataStoreName': '',
}; };
} }
_updateFromObjectMD(objMd) { /**
// We only duplicate selected attributes here, where setters * Returns metadata model version
// allow to change inner values, and let the others as shallow *
// copies. Since performance is a concern, we want to avoid * @return {number} Metadata model version
// the JSON.parse(JSON.stringify()) method. */
getModelVersion() {
Object.assign(this._data, objMd._data); return this._data['md-model-version'];
Object.assign(this._data.replicationInfo,
objMd._data.replicationInfo);
}
_updateFromParsedJSON(objMd) {
// objMd is a new JS object created for the purpose, it's safe
// to just assign its top-level properties.
Object.assign(this._data, objMd);
this._convertToLatestModel();
}
_convertToLatestModel() {
// handle backward-compat stuff
if (typeof(this._data.location) === 'string') {
this.setLocation([{ key: this._data.location }]);
}
} }
/** /**
@ -523,53 +462,21 @@ class ObjectMD {
/** /**
* Set location * Set location
* *
* @param {object[]} location - array of data locations (see * @param {string[]} location - location
* constructor of {@link ObjectMDLocation} for a description of
* fields for each array object)
* @return {ObjectMD} itself * @return {ObjectMD} itself
*/ */
setLocation(location) { setLocation(location) {
if (!Array.isArray(location) || location.length === 0) {
this._data.location = null;
} else {
this._data.location = location; this._data.location = location;
}
return this; return this;
} }
/** /**
* Returns location * Returns location
* *
* @return {object[]} location * @return {string[]} location
*/ */
getLocation() { getLocation() {
const { location } = this._data; return this._data.location;
return Array.isArray(location) ? location : [];
}
// Object metadata may contain multiple elements for a single part if
// the part was originally copied from another MPU. Here we reduce the
// locations array to a single element for each part.
getReducedLocations() {
const locations = this.getLocation();
const reducedLocations = [];
let partTotal = 0;
for (let i = 0; i < locations.length; i++) {
const currPart = new ObjectMDLocation(locations[i]);
const currPartNum = currPart.getPartNumber();
let nextPartNum = undefined;
if (i < locations.length - 1) {
const nextPart = new ObjectMDLocation(locations[i + 1]);
nextPartNum = nextPart.getPartNumber();
}
partTotal += currPart.getPartSize();
if (currPartNum !== nextPartNum) {
currPart.setPartSize(partTotal);
reducedLocations.push(currPart.getValue());
partTotal = 0;
}
}
return reducedLocations;
} }
/** /**
@ -652,16 +559,6 @@ class ObjectMD {
return this._data.versionId; return this._data.versionId;
} }
/**
* Get metadata versionId value in encoded form (the one visible
* to the S3 API user)
*
* @return {string} The encoded object versionId
*/
getEncodedVersionId() {
return VersionIDUtils.encode(this.getVersionId());
}
/** /**
* Set tags * Set tags
* *
@ -689,16 +586,14 @@ class ObjectMD {
* @return {ObjectMD} itself * @return {ObjectMD} itself
*/ */
setReplicationInfo(replicationInfo) { setReplicationInfo(replicationInfo) {
const { status, content, destination, storageClass, role, const { status, content, destination, storageClass, role } =
storageType, dataStoreVersionId } = replicationInfo; replicationInfo;
this._data.replicationInfo = { this._data.replicationInfo = {
status, status,
content, content,
destination, destination,
storageClass: storageClass || '', storageClass: storageClass || '',
role, role,
storageType: storageType || '',
dataStoreVersionId: dataStoreVersionId || '',
}; };
return this; return this;
} }
@ -712,45 +607,6 @@ class ObjectMD {
return this._data.replicationInfo; return this._data.replicationInfo;
} }
setReplicationStatus(status) {
this._data.replicationInfo.status = status;
return this;
}
setReplicationDataStoreVersionId(versionId) {
this._data.replicationInfo.dataStoreVersionId = versionId;
return this;
}
getReplicationDataStoreVersionId() {
return this._data.replicationInfo.dataStoreVersionId;
}
getReplicationStatus() {
return this._data.replicationInfo.status;
}
getReplicationContent() {
return this._data.replicationInfo.content;
}
getReplicationRoles() {
return this._data.replicationInfo.role;
}
getReplicationStorageType() {
return this._data.replicationInfo.storageType;
}
getReplicationStorageClass() {
return this._data.replicationInfo.storageClass;
}
getReplicationTargetBucket() {
const destBucketArn = this._data.replicationInfo.destination;
return destBucketArn.split(':').slice(-1)[0];
}
/** /**
* Set dataStoreName * Set dataStoreName
* *
@ -771,19 +627,6 @@ class ObjectMD {
return this._data.dataStoreName; return this._data.dataStoreName;
} }
/**
* Get dataStoreVersionId
*
* @return {string} external backend version id for data
*/
getDataStoreVersionId() {
const location = this.getLocation();
if (!location[0]) {
return undefined;
}
return location[0].dataStoreVersionId;
}
/** /**
* Set custom meta headers * Set custom meta headers
* *
@ -822,6 +665,4 @@ class ObjectMD {
getValue() { getValue() {
return this._data; return this._data;
} }
} };
module.exports = ObjectMD;

View File

@ -1,72 +0,0 @@
/**
* Helper class to ease access to a single data location in metadata
* 'location' array
*/
/**
 * Helper class to ease access to a single data location in metadata
 * 'location' array
 */
class ObjectMDLocation {
    /**
     * @constructor
     * @param {object} locationObj - single data location info
     * @param {string} locationObj.key - data backend key
     * @param {number} locationObj.start - index of first data byte of
     *   this part in the full object
     * @param {number} locationObj.size - byte length of data part
     * @param {string} locationObj.dataStoreName - type of data store
     * @param {string} locationObj.dataStoreETag - internal ETag of
     *   data part, formatted as "<partNumber>:<etag>"
     */
    constructor(locationObj) {
        const { key, start, size, dataStoreName, dataStoreETag } = locationObj;
        this._data = { key, start, size, dataStoreName, dataStoreETag };
    }

    /** @return {string} data backend key */
    getKey() {
        return this._data.key;
    }

    /** @return {string} name of the data store holding this part */
    getDataStoreName() {
        return this._data.dataStoreName;
    }

    /**
     * Point this part at a new data location.
     *
     * @param {object} location - new location info
     * @param {string} location.key - new data backend key
     * @param {string} location.dataStoreName - new data store name
     * @return {ObjectMDLocation} itself, for chaining
     */
    setDataLocation(location) {
        this._data.key = location.key;
        this._data.dataStoreName = location.dataStoreName;
        return this;
    }

    /** @return {string} raw "<partNumber>:<etag>" data store ETag */
    getDataStoreETag() {
        return this._data.dataStoreETag;
    }

    /** @return {number} part number parsed from the data store ETag */
    getPartNumber() {
        const [partNumber] = this._data.dataStoreETag.split(':');
        return Number.parseInt(partNumber, 10);
    }

    /** @return {string} ETag portion of the data store ETag */
    getPartETag() {
        const [, etag] = this._data.dataStoreETag.split(':');
        return etag;
    }

    /** @return {number} byte offset of this part in the full object */
    getPartStart() {
        return this._data.start;
    }

    /** @return {number} byte length of this part */
    getPartSize() {
        return this._data.size;
    }

    /**
     * Update the byte length of this part.
     *
     * @param {number} size - new byte length
     * @return {ObjectMDLocation} itself, for chaining
     */
    setPartSize(size) {
        this._data.size = size;
        return this;
    }

    /** @return {object} plain-object view of the location data */
    getValue() {
        return this._data;
    }
}
module.exports = ObjectMDLocation;

View File

@ -58,8 +58,6 @@ class ReplicationConfiguration {
this._role = null; this._role = null;
this._destination = null; this._destination = null;
this._rules = null; this._rules = null;
this._prevStorageClass = null;
this._isExternalLocation = null;
} }
/** /**
@ -150,16 +148,11 @@ class ReplicationConfiguration {
} }
const role = parsedRole[0]; const role = parsedRole[0];
const rolesArr = role.split(','); const rolesArr = role.split(',');
if (!this._isExternalLocation && rolesArr.length !== 2) { if (rolesArr.length !== 2) {
return errors.InvalidArgument.customizeDescription( return errors.InvalidArgument.customizeDescription(
'Invalid Role specified in replication configuration: ' + 'Invalid Role specified in replication configuration: ' +
'Role must be a comma-separated list of two IAM roles'); 'Role must be a comma-separated list of two IAM roles');
} }
if (this._isExternalLocation && rolesArr.length > 1) {
return errors.InvalidArgument.customizeDescription(
'Invalid Role specified in replication configuration: ' +
'Role may not contain a comma separator');
}
const invalidRole = rolesArr.find(r => !this._isValidRoleARN(r)); const invalidRole = rolesArr.find(r => !this._isValidRoleARN(r));
if (invalidRole !== undefined) { if (invalidRole !== undefined) {
return errors.InvalidArgument.customizeDescription( return errors.InvalidArgument.customizeDescription(
@ -275,6 +268,22 @@ class ReplicationConfiguration {
return undefined; return undefined;
} }
/**
* Check that the `StorageClass` is a valid class
* @param {string} storageClass - The storage class to validate
* @return {boolean} `true` if valid, otherwise `false`
*/
_isValidStorageClass(storageClass) {
if (!this._config) {
return validStorageClasses.includes(storageClass);
}
const replicationEndpoints = this._config.replicationEndpoints
.map(endpoint => endpoint.site);
return replicationEndpoints.includes(storageClass) ||
validStorageClasses.includes(storageClass);
}
/** /**
* Check that the `StorageClass` property is valid * Check that the `StorageClass` property is valid
* @param {object} destination - The destination object from this._parsedXML * @param {object} destination - The destination object from this._parsedXML
@ -283,28 +292,9 @@ class ReplicationConfiguration {
_parseStorageClass(destination) { _parseStorageClass(destination) {
const storageClass = destination.StorageClass && const storageClass = destination.StorageClass &&
destination.StorageClass[0]; destination.StorageClass[0];
if (!this._config) { if (!this._isValidStorageClass(storageClass)) {
return validStorageClasses.includes(storageClass);
}
const replicationEndpoints = this._config.replicationEndpoints
.map(endpoint => endpoint.site);
const locationConstraints =
Object.keys(this._config.locationConstraints);
if (locationConstraints.includes(storageClass)) {
if (this._prevStorageClass !== null &&
this._prevStorageClass !== storageClass) {
return errors.InvalidRequest.customizeDescription(
'The storage class must be same for all rules when ' +
'replicating objects to an external location');
}
this._isExternalLocation = true;
}
if (!replicationEndpoints.includes(storageClass) &&
!locationConstraints.includes(storageClass) &&
!validStorageClasses.includes(storageClass)) {
return errors.MalformedXML; return errors.MalformedXML;
} }
this._prevStorageClass = storageClass;
return undefined; return undefined;
} }
@ -369,11 +359,11 @@ class ReplicationConfiguration {
* @return {undefined} * @return {undefined}
*/ */
parseConfiguration() { parseConfiguration() {
const err = this._parseRules(); const err = this._parseRole() || this._parseRules();
if (err) { if (err) {
return err; return err;
} }
return this._parseRole(); return undefined;
} }
/** /**

View File

@ -34,6 +34,8 @@ class RoundRobin {
throw new Error( throw new Error(
'at least one host must be provided for round robin'); 'at least one host must be provided for round robin');
} }
this.hostsList = hostsList.map(item => this._validateHostObj(item));
if (options && options.logger) { if (options && options.logger) {
this.logger = options.logger; this.logger = options.logger;
} }
@ -42,11 +44,6 @@ class RoundRobin {
} else { } else {
this.stickyCount = DEFAULT_STICKY_COUNT; this.stickyCount = DEFAULT_STICKY_COUNT;
} }
if (options && options.defaultPort) {
this.defaultPort = Number.parseInt(options.defaultPort, 10);
}
this.hostsList = hostsList.map(item => this._validateHostObj(item));
// TODO: add blacklisting capability // TODO: add blacklisting capability
@ -93,8 +90,7 @@ class RoundRobin {
port: parsedPort, port: parsedPort,
}; };
} }
return { host: hostItemObj.host, return { host: hostItemObj.host };
port: this.defaultPort };
} }
/** /**

View File

@ -176,7 +176,7 @@ class RESTServer extends httpServer {
throw errors.MissingContentLength; throw errors.MissingContentLength;
} }
size = Number.parseInt(contentLength, 10); size = Number.parseInt(contentLength, 10);
if (Number.isNaN(size)) { if (isNaN(size)) {
throw errors.InvalidInput.customizeDescription( throw errors.InvalidInput.customizeDescription(
'bad Content-Length'); 'bad Content-Length');
} }

View File

@ -596,7 +596,7 @@ function objectStreamToJSON(rstream, wstream, cb) {
streamRPCJSONObj = function _streamRPCJSONObj(obj, wstream, cb) { streamRPCJSONObj = function _streamRPCJSONObj(obj, wstream, cb) {
const cbOnce = jsutil.once(cb); const cbOnce = jsutil.once(cb);
if (typeof obj === 'object') { if (typeof(obj) === 'object') {
if (obj && obj.pipe !== undefined) { if (obj && obj.pipe !== undefined) {
// stream object streams as JSON arrays // stream object streams as JSON arrays
return objectStreamToJSON(obj, wstream, cbOnce); return objectStreamToJSON(obj, wstream, cbOnce);
@ -734,7 +734,7 @@ function RESTServer(params) {
* @return {undefined} * @return {undefined}
*/ */
httpServer.registerServices = function registerServices(...serviceList) { httpServer.registerServices = function registerServices(...serviceList) {
this.serviceList.push(...serviceList); this.serviceList.push.apply(this.serviceList, serviceList);
}; };
return httpServer; return httpServer;

View File

@ -134,7 +134,8 @@ class SIOInputStream extends stream.Readable {
if (this._destroyed) { if (this._destroyed) {
return; return;
} }
this._readState.pushBuffer.push(...data); this._readState.pushBuffer.push.apply(this._readState.pushBuffer,
data);
if (this._readState.readable) { if (this._readState.readable) {
this._pushData(); this._pushData();
} }
@ -259,9 +260,9 @@ class SIOStreamSocket {
return arg; return arg;
} }
const log = this.logger; const log = this.logger;
const isReadStream = (typeof arg.pipe === 'function' const isReadStream = (typeof(arg.pipe) === 'function'
&& typeof (arg.read) === 'function'); && typeof (arg.read) === 'function');
let isWriteStream = (typeof arg.write === 'function'); let isWriteStream = (typeof(arg.write) === 'function');
if (isReadStream || isWriteStream) { if (isReadStream || isWriteStream) {
if (isReadStream && isWriteStream) { if (isReadStream && isWriteStream) {
@ -302,7 +303,7 @@ class SIOStreamSocket {
} }
return encodedStream; return encodedStream;
} }
if (typeof arg === 'object') { if (typeof(arg) === 'object') {
let encodedObj; let encodedObj;
if (Array.isArray(arg)) { if (Array.isArray(arg)) {
encodedObj = []; encodedObj = [];
@ -376,7 +377,7 @@ class SIOStreamSocket {
}); });
return stream; return stream;
} }
if (typeof arg === 'object') { if (typeof(arg) === 'object') {
let decodedObj; let decodedObj;
if (Array.isArray(arg)) { if (Array.isArray(arg)) {
decodedObj = []; decodedObj = [];

View File

@ -91,10 +91,6 @@ const _actionMapSSO = {
SsoAuthorize: 'sso:Authorize', SsoAuthorize: 'sso:Authorize',
}; };
const _actionMapSTS = {
assumeRole: 'sts:AssumeRole',
};
function _findAction(service, method) { function _findAction(service, method) {
if (service === 's3') { if (service === 's3') {
return _actionMap[method]; return _actionMap[method];
@ -112,9 +108,6 @@ function _findAction(service, method) {
// currently only method is ListMetrics // currently only method is ListMetrics
return `utapi:${method}`; return `utapi:${method}`;
} }
if (service === 'sts') {
return _actionMapSTS[method];
}
return undefined; return undefined;
} }
@ -130,17 +123,13 @@ function _buildArn(service, generalResource, specificResource, requesterInfo) {
} }
return 'arn:aws:s3:::'; return 'arn:aws:s3:::';
} }
if (service === 'iam' || service === 'sts') { if (service === 'iam') {
// arn:aws:iam::<account-id>:<resource-type><resource> // arn:aws:iam::<account-id>:<resource-type><resource>
let accountId = requesterInfo.accountid;
if (service === 'sts') {
accountId = requesterInfo.targetAccountId;
}
if (specificResource) { if (specificResource) {
return `arn:aws:iam::${accountId}:` + return `arn:aws:iam::${requesterInfo.accountid}:` +
`${generalResource}${specificResource}`; `${generalResource}${specificResource}`;
} }
return `arn:aws:iam::${accountId}:${generalResource}`; return `arn:aws:iam::${requesterInfo.accountid}:${generalResource}`;
} }
if (service === 'ring') { if (service === 'ring') {
// arn:aws:iam::<account-id>:<resource-type><resource> // arn:aws:iam::<account-id>:<resource-type><resource>
@ -188,7 +177,6 @@ function _buildArn(service, generalResource, specificResource, requesterInfo) {
* @param {string} authType - type of authentication used * @param {string} authType - type of authentication used
* @param {number} signatureAge - age of signature in milliseconds * @param {number} signatureAge - age of signature in milliseconds
* @param {string} securityToken - auth security token (temporary credentials) * @param {string} securityToken - auth security token (temporary credentials)
* @param {string} policyArn - policy arn
* @return {RequestContext} a RequestContext instance * @return {RequestContext} a RequestContext instance
*/ */
@ -196,8 +184,7 @@ class RequestContext {
constructor(headers, query, generalResource, specificResource, constructor(headers, query, generalResource, specificResource,
requesterIp, sslEnabled, apiMethod, requesterIp, sslEnabled, apiMethod,
awsService, locationConstraint, requesterInfo, awsService, locationConstraint, requesterInfo,
signatureVersion, authType, signatureAge, securityToken, policyArn, signatureVersion, authType, signatureAge, securityToken) {
action) {
this._headers = headers; this._headers = headers;
this._query = query; this._query = query;
this._requesterIp = requesterIp; this._requesterIp = requesterIp;
@ -223,8 +210,7 @@ class RequestContext {
this._authType = authType; this._authType = authType;
this._signatureAge = signatureAge; this._signatureAge = signatureAge;
this._securityToken = securityToken; this._securityToken = securityToken;
this._policyArn = policyArn;
this._action = action;
return this; return this;
} }
@ -251,8 +237,6 @@ class RequestContext {
locationConstraint: this._locationConstraint, locationConstraint: this._locationConstraint,
tokenIssueTime: this._tokenIssueTime, tokenIssueTime: this._tokenIssueTime,
securityToken: this._securityToken, securityToken: this._securityToken,
policyArn: this._policyArn,
action: this._action,
}; };
return JSON.stringify(requestInfo); return JSON.stringify(requestInfo);
} }
@ -273,8 +257,7 @@ class RequestContext {
obj.specificResource, obj.requesterIp, obj.sslEnabled, obj.specificResource, obj.requesterIp, obj.sslEnabled,
obj.apiMethod, obj.awsService, obj.locationConstraint, obj.apiMethod, obj.awsService, obj.locationConstraint,
obj.requesterInfo, obj.signatureVersion, obj.requesterInfo, obj.signatureVersion,
obj.authType, obj.signatureAge, obj.securityToken, obj.policyArn, obj.authType, obj.signatureAge, obj.securityToken);
obj.action);
} }
/** /**
@ -282,9 +265,6 @@ class RequestContext {
* @return {string} action * @return {string} action
*/ */
getAction() { getAction() {
if (this._action) {
return this._action;
}
if (this._foundAction) { if (this._foundAction) {
return this._foundAction; return this._foundAction;
} }
@ -375,26 +355,6 @@ class RequestContext {
return parseIp(this._requesterIp); return parseIp(this._requesterIp);
} }
getRequesterAccountId() {
return this._requesterInfo.accountid;
}
getRequesterEndArn() {
return this._requesterInfo.arn;
}
getRequesterExternalId() {
return this._requesterInfo.externalId;
}
getRequesterPrincipalArn() {
return this._requesterInfo.parentArn || this._requesterInfo.arn;
}
getRequesterType() {
return this._requesterInfo.principalType;
}
/** /**
* Set sslEnabled * Set sslEnabled
* @param {boolean} sslEnabled - true if https used * @param {boolean} sslEnabled - true if https used
@ -588,26 +548,6 @@ class RequestContext {
this._securityToken = token; this._securityToken = token;
return this; return this;
} }
/**
* Get the policy arn
*
* @return {string} policyArn - Policy arn
*/
getPolicyArn() {
return this._policyArn;
}
/**
* Set the policy arn
*
* @param {string} policyArn - Policy arn
* @return {RequestContext} itself
*/
setPolicyArn(policyArn) {
this._policyArn = policyArn;
return this;
}
} }
module.exports = RequestContext; module.exports = RequestContext;

View File

@ -38,7 +38,7 @@ function isResourceApplicable(requestContext, statementResource, log) {
// Pull just the relative id because there is no restriction that it // Pull just the relative id because there is no restriction that it
// does not contain ":" // does not contain ":"
const requestRelativeId = requestResourceArr.slice(5).join(':'); const requestRelativeId = requestResourceArr.slice(5).join(':');
for (let i = 0; i < statementResource.length; i++) { for (let i = 0; i < statementResource.length; i ++) {
// Handle variables (must handle BEFORE wildcards) // Handle variables (must handle BEFORE wildcards)
const policyResource = const policyResource =
substituteVariables(statementResource[i], requestContext); substituteVariables(statementResource[i], requestContext);
@ -73,7 +73,7 @@ function isActionApplicable(requestAction, statementAction, log) {
statementAction = [statementAction]; statementAction = [statementAction];
} }
const length = statementAction.length; const length = statementAction.length;
for (let i = 0; i < length; i++) { for (let i = 0; i < length; i ++) {
// No variables in actions so no need to handle // No variables in actions so no need to handle
const regExStrOfStatementAction = const regExStrOfStatementAction =
handleWildcards(statementAction[i]); handleWildcards(statementAction[i]);
@ -98,12 +98,12 @@ function isActionApplicable(requestAction, statementAction, log) {
* @param {Object} log - logger * @param {Object} log - logger
* @return {boolean} true if meet conditions, false if not * @return {boolean} true if meet conditions, false if not
*/ */
evaluators.meetConditions = (requestContext, statementCondition, log) => { function meetConditions(requestContext, statementCondition, log) {
// The Condition portion of a policy is an object with different // The Condition portion of a policy is an object with different
// operators as keys // operators as keys
const operators = Object.keys(statementCondition); const operators = Object.keys(statementCondition);
const length = operators.length; const length = operators.length;
for (let i = 0; i < length; i++) { for (let i = 0; i < length; i ++) {
const operator = operators[i]; const operator = operators[i];
const hasIfExistsCondition = operator.endsWith('IfExists'); const hasIfExistsCondition = operator.endsWith('IfExists');
// If has "IfExists" added to operator name, find operator name // If has "IfExists" added to operator name, find operator name
@ -119,7 +119,8 @@ evaluators.meetConditions = (requestContext, statementCondition, log) => {
const conditionsWithSameOperator = statementCondition[operator]; const conditionsWithSameOperator = statementCondition[operator];
const conditionKeys = Object.keys(conditionsWithSameOperator); const conditionKeys = Object.keys(conditionsWithSameOperator);
const conditionKeysLength = conditionKeys.length; const conditionKeysLength = conditionKeys.length;
for (let j = 0; j < conditionKeysLength; j++) { for (let j = 0; j < conditionKeysLength;
j ++) {
const key = conditionKeys[j]; const key = conditionKeys[j];
let value = conditionsWithSameOperator[key]; let value = conditionsWithSameOperator[key];
if (!Array.isArray(value)) { if (!Array.isArray(value)) {
@ -170,7 +171,7 @@ evaluators.meetConditions = (requestContext, statementCondition, log) => {
} }
} }
return true; return true;
}; }
/** /**
* Evaluate whether a request is permitted under a policy. * Evaluate whether a request is permitted under a policy.
@ -221,8 +222,7 @@ evaluators.evaluatePolicy = (requestContext, policy, log) => {
continue; continue;
} }
// If do not meet conditions move on to next statement // If do not meet conditions move on to next statement
if (currentStatement.Condition && if (currentStatement.Condition && !meetConditions(requestContext,
!evaluators.meetConditions(requestContext,
currentStatement.Condition, log)) { currentStatement.Condition, log)) {
continue; continue;
} }

View File

@ -1,176 +0,0 @@
const { meetConditions } = require('./evaluator');
/**
 * Class with static methods to evaluate the Principal / NotPrincipal
 * field of trust-policy statements against the requester's identity.
 */
class Principal {
    /**
     * Evaluate the statement's conditions, if any.
     *
     * @param {object} params - Evaluation parameters
     * @param {object} params.rc - request context
     * @param {object} params.log - logger
     * @param {object} statement - Statement policy field
     * @return {boolean} true if the conditions are met or absent
     */
    static _evaluateCondition(params, statement) {
        if (statement.Condition) {
            return meetConditions(params.rc, statement.Condition, params.log);
        }
        // A statement without a Condition block applies unconditionally.
        return true;
    }

    /**
     * Check a statement's Principal (or NotPrincipal) field against the
     * set of principal values considered valid for the requester.
     *
     * @param {object} params - Evaluation parameters
     * @param {object} statement - Statement policy field
     * @param {object} valids - Valid principal fields (AWS / Federated /
     * Service)
     * @return {string} result of principal evaluation, either 'Neutral',
     * 'Allow' or 'Deny'
     */
    static _evaluatePrincipalField(params, statement, valids) {
        // NotPrincipal inverts the meaning of a match.
        const reverse = !!statement.NotPrincipal;
        const principal = statement.Principal || statement.NotPrincipal;
        if (typeof principal === 'string' && principal === '*') {
            if (reverse) {
                // In case of anonymous NotPrincipal, this will neutral everyone
                return 'Neutral';
            }
            if (!Principal._evaluateCondition(params, statement)) {
                return 'Neutral';
            }
            return statement.Effect;
        } else if (typeof principal === 'string') {
            // Any bare-string principal other than '*' is invalid: deny.
            return 'Deny';
        }
        let ref = [];
        let toCheck = [];
        // Select the single principal category present in both the
        // statement and the valid set, in fixed priority order.
        if (valids.Federated && principal.Federated) {
            ref = valids.Federated;
            toCheck = principal.Federated;
        } else if (valids.AWS && principal.AWS) {
            ref = valids.AWS;
            toCheck = principal.AWS;
        } else if (valids.Service && principal.Service) {
            ref = valids.Service;
            toCheck = principal.Service;
        } else {
            // No overlapping category: a NotPrincipal statement matches
            // (nothing excluded the requester), a Principal one does not.
            if (reverse) {
                return statement.Effect;
            }
            return 'Neutral';
        }
        toCheck = Array.isArray(toCheck) ? toCheck : [toCheck];
        ref = Array.isArray(ref) ? ref : [ref];
        if (toCheck.indexOf('*') !== -1) {
            if (reverse) {
                return 'Neutral';
            }
            if (!Principal._evaluateCondition(params, statement)) {
                return 'Neutral';
            }
            return statement.Effect;
        }
        const len = ref.length;
        for (let i = 0; i < len; ++i) {
            if (toCheck.indexOf(ref[i]) !== -1) {
                if (reverse) {
                    return 'Neutral';
                }
                if (!Principal._evaluateCondition(params, statement)) {
                    return 'Neutral';
                }
                return statement.Effect;
            }
        }
        if (reverse) {
            return statement.Effect;
        }
        return 'Neutral';
    }

    /**
     * Evaluate the principal of every statement of the trusted policy
     * against the valid principal set.
     *
     * @param {object} params - Evaluation parameters
     * @param {object} params.trustedPolicy - policy document to evaluate
     * @param {object} valids - Valid principal fields
     * @return {string} result of principal evaluation, either 'Allow' or
     * 'Deny'
     */
    static _evaluatePrincipal(params, valids) {
        const doc = params.trustedPolicy;
        let statements = doc.Statement;
        if (!Array.isArray(statements)) {
            statements = [statements];
        }
        const len = statements.length;
        // Any explicit Deny wins; otherwise one Allow suffices.
        let authorized = 'Deny';
        for (let i = 0; i < len; ++i) {
            const statement = statements[i];
            const result = Principal._evaluatePrincipalField(params,
                statement, valids);
            if (result === 'Deny') {
                return 'Deny';
            } else if (result === 'Allow') {
                authorized = 'Allow';
            }
        }
        return authorized;
    }

    /**
     * Evaluate the principal for a policy, building the valid principal
     * set from the requester's identity.
     *
     * @param {object} params - Evaluation parameters
     * @return {object} {
     *  result: 'Allow' or 'Deny',
     *  checkAction: true or false,
     * }
     */
    static evaluatePrincipal(params) {
        let valids = null;
        let checkAction = false;
        const account = params.rc.getRequesterAccountId();
        const targetAccount = params.targetAccountId;
        const accountArn = `arn:aws:iam::${account}:root`;
        const requesterArn = params.rc.getRequesterPrincipalArn();
        const requesterEndArn = params.rc.getRequesterEndArn();
        const requesterType = params.rc.getRequesterType();
        if (account !== targetAccount) {
            // Cross-account request: only the requester account itself
            // may match, and the action must be checked as well.
            valids = {
                AWS: [
                    account,
                    accountArn,
                ],
            };
            checkAction = true;
        } else {
            if (requesterType === 'User' || requesterType === 'AssumedRole' ||
                requesterType === 'Federated') {
                valids = {
                    AWS: [
                        account,
                        accountArn,
                    ],
                };
                if (requesterType === 'User' ||
                    requesterType === 'AssumedRole') {
                    valids.AWS.push(requesterArn);
                    if (requesterEndArn !== requesterArn) {
                        valids.AWS.push(requesterEndArn);
                    }
                } else {
                    valids.Federated = [requesterArn];
                }
            } else if (requesterType === 'Service') {
                valids = { Service: requesterArn };
            }
            // NOTE(review): for any other same-account requester type,
            // valids stays null and _evaluatePrincipal would dereference
            // it — confirm callers never reach this path.
        }
        const result = Principal._evaluatePrincipal(params, valids);
        return {
            result,
            checkAction,
        };
    }
}
module.exports = Principal;

View File

@ -14,7 +14,8 @@ const handleWildcardInResource =
*/ */
function checkArnMatch(policyArn, requestRelativeId, requestArnArr, function checkArnMatch(policyArn, requestRelativeId, requestArnArr,
caseSensitive) { caseSensitive) {
const regExofArn = handleWildcardInResource(policyArn); let regExofArn = handleWildcardInResource(policyArn);
regExofArn = caseSensitive ? regExofArn : regExofArn.toLowerCase();
// The relativeId is the last part of the ARN (for instance, a bucket and // The relativeId is the last part of the ARN (for instance, a bucket and
// object name in S3) // object name in S3)
// Join on ":" in case there were ":" in the relativeID at the end // Join on ":" in case there were ":" in the relativeID at the end
@ -30,7 +31,7 @@ function checkArnMatch(policyArn, requestRelativeId, requestArnArr,
} }
// Check the other parts of the ARN to make sure they match. If not, // Check the other parts of the ARN to make sure they match. If not,
// return false. // return false.
for (let j = 0; j < 5; j++) { for (let j = 0; j < 5; j ++) {
const segmentRegEx = new RegExp(regExofArn[j]); const segmentRegEx = new RegExp(regExofArn[j]);
const requestSegment = caseSensitive ? requestArnArr[j] : const requestSegment = caseSensitive ? requestArnArr[j] :
requestArnArr[j].toLowerCase(); requestArnArr[j].toLowerCase();

View File

@ -144,8 +144,6 @@ conditions.findConditionKey = (key, requestContext) => {
// header // header
map.set('s3:ObjLocationConstraint', map.set('s3:ObjLocationConstraint',
headers['x-amz-meta-scal-location-constraint']); headers['x-amz-meta-scal-location-constraint']);
map.set('sts:ExternalId', requestContext.getRequesterExternalId());
map.set('iam:PolicyArn', requestContext.getPolicyArn());
return map.get(key); return map.get(key);
}; };

View File

@ -1,83 +0,0 @@
const EventEmitter = require('events');
/**
 * Collects the outcomes of streaming subparts to Azure.
 *
 * Emits "done" once streaming has been marked complete and a result has
 * come back for every subpart registered via `pushOp`.
 * Emits "error" when Azure reports a failure for a subpart while
 * streaming is still in progress.
 * @class ResultsCollector
 */
class ResultsCollector extends EventEmitter {
    /**
     * @constructor
     */
    constructor() {
        super();
        this._results = [];
        this._queue = 0;
        this._streamingFinished = false;
    }

    /**
     * Record the outcome of putting one subpart, emitting "done" or
     * "error" as appropriate.
     * @param {(Error|undefined)} err - error returned from Azure after
     * putting a subpart
     * @param {number} subPartIndex - the index of the subpart
     * @emits ResultCollector#done
     * @emits ResultCollector#error
     * @return {undefined}
     */
    pushResult(err, subPartIndex) {
        this._results.push({ error: err, subPartIndex });
        this._queue -= 1;
        if (this._resultsComplete()) {
            this.emit('done', err, this._results);
            return;
        }
        if (err) {
            this.emit('error', err, subPartIndex);
        }
    }

    /**
     * Register that another subpart upload is in flight.
     * @return {undefined}
     */
    pushOp() {
        this._queue += 1;
    }

    /**
     * Mark streaming as finished, allowing the "done" event to fire once
     * the final in-flight result arrives.
     * @return {undefined}
     */
    enableComplete() {
        this._streamingFinished = true;
    }

    /**
     * @return {boolean} true when every registered operation has reported
     * back and streaming is over
     */
    _resultsComplete() {
        return this._queue === 0 && this._streamingFinished;
    }
}
/**
* "done" event
* @event ResultCollector#done
* @type {(Error|undefined)} err - error returned by Azure putting last subpart
* @type {object[]} results - result for putting each of the subparts
* @property {Error} [results[].error] - error returned by Azure putting subpart
* @property {number} results[].subPartIndex - index of the subpart
*/
/**
* "error" event
* @event ResultCollector#error
* @type {(Error|undefined)} error - error returned by Azure last subpart
* @type {number} subPartIndex - index of the subpart
*/
module.exports = ResultsCollector;

View File

@ -1,126 +0,0 @@
const stream = require('stream');
/**
 * Interface for carving a source stream into subpart streams.
 * @class SubStreamInterface
 */
class SubStreamInterface {
    /**
     * @constructor
     * @param {stream.Readable} sourceStream - stream to read for data
     */
    constructor(sourceStream) {
        this._sourceStream = sourceStream;
        this._totalLengthCounter = 0;
        this._lengthCounter = 0;
        this._subPartIndex = 0;
        this._currentStream = new stream.PassThrough();
    }

    /**
     * Pause the flow of data from the source stream.
     * @return {undefined}
     */
    pauseStreaming() {
        this._sourceStream.pause();
    }

    /**
     * Resume the flow of data from the source stream.
     * @return {undefined}
     */
    resumeStreaming() {
        this._sourceStream.resume();
    }

    /**
     * Signal end of data on the current (last) subpart stream; call once
     * the source stream has ended.
     * @return {undefined}
     */
    endStreaming() {
        this._totalLengthCounter += this._lengthCounter;
        this._currentStream.end();
    }

    /**
     * Destroy the underlying streams when streaming must be stopped
     * externally.
     * @param {stream.Readable} [piper] - a stream that is piping data
     * into the source stream
     * @return {undefined}
     */
    stopStreaming(piper) {
        if (piper) {
            piper.unpipe();
            piper.destroy();
        }
        this._sourceStream.destroy();
        this._currentStream.destroy();
    }

    /**
     * @return {number} bytes streamed for the current subpart
     */
    getLengthCounter() {
        return this._lengthCounter;
    }

    /**
     * @return {number} total bytes streamed across all subparts
     */
    getTotalBytesStreamed() {
        return this._totalLengthCounter;
    }

    /**
     * @return {stream.PassThrough} subpart stream currently being written
     * to from the source stream
     */
    getCurrentStream() {
        return this._currentStream;
    }

    /**
     * End the current subpart stream, swap in a fresh one and resume the
     * data flow.
     * @return {object} object containing the new current stream and the
     * index of the new subpart
     */
    transitionToNextStream() {
        this.pauseStreaming();
        this._currentStream.end();
        this._totalLengthCounter += this._lengthCounter;
        this._lengthCounter = 0;
        this._subPartIndex += 1;
        this._currentStream = new stream.PassThrough();
        this.resumeStreaming();
        return {
            nextStream: this._currentStream,
            subPartIndex: this._subPartIndex,
        };
    }

    /**
     * Write a chunk to the current subpart stream, applying backpressure
     * on the source stream when the subpart stream is saturated.
     * @param {Buffer} chunk - a chunk of data
     * @return {undefined}
     */
    write(chunk) {
        if (!this._currentStream.write(chunk)) {
            this.pauseStreaming();
            this._currentStream.once('drain', () => this.resumeStreaming());
        }
        this._lengthCounter += chunk.length;
    }
}
module.exports = SubStreamInterface;

View File

@ -1,224 +0,0 @@
const crypto = require('crypto');
const stream = require('stream');
const ResultsCollector = require('./ResultsCollector');
const SubStreamInterface = require('./SubStreamInterface');
const objectUtils = require('../objectUtils');
const MD5Sum = require('../MD5Sum');
const errors = require('../../errors');
// Shared helpers for emulating S3 multipart uploads on Azure block blobs.
const azureMpuUtils = {};

// Delimiter used when packing several values into one key or block id.
azureMpuUtils.splitter = '|';
azureMpuUtils.overviewMpuKey = 'azure_mpu';
// Maximum size of a single Azure block (100 MiB); larger parts are split
// into multiple subparts of at most this size.
azureMpuUtils.maxSubPartSize = 104857600;
// Hex MD5 of the empty string, used as the ETag of zero-byte content.
azureMpuUtils.zeroByteETag = crypto.createHash('md5').update('').digest('hex');
/**
 * Pad `str` to a fixed width according to a named category:
 * - 'partNumber': left-pad with zeroes to 5 characters
 * - 'subPart': left-pad with zeroes to 2 characters
 * - 'part': right-pad with '%' to 64 characters
 * Note both directions also truncate to the pad template's length.
 * @param {string|number} str - value to pad
 * @param {string} category - 'partNumber', 'subPart' or 'part'
 * @return {string} the padded string
 */
azureMpuUtils.padString = (str, category) => {
    const _padFn = {
        // Keep the rightmost padString.length characters.
        left: (str, padString) =>
            `${padString}${str}`.substr(-padString.length),
        // Keep the leftmost padString.length characters.
        right: (str, padString) =>
            `${str}${padString}`.substr(0, padString.length),
    };
    // It's a little more performant if we add pre-generated strings for each
    // type of padding we want to apply, instead of using string.repeat() to
    // create the padding.
    const padSpec = {
        partNumber: {
            padString: '00000',
            direction: 'left',
        },
        subPart: {
            padString: '00',
            direction: 'left',
        },
        part: {
            padString:
            '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%',
            direction: 'right',
        },
    };
    const { direction, padString } = padSpec[category];
    return _padFn[direction](str, padString);
};
// NOTE: If we want to extract the object name from these keys, we will need
// to use a similar method to _getKeyAndUploadIdFromMpuKey since the object
// name may have instances of the splitter used to delimit arguments

/**
 * Build the key under which an MPU's summary is stored.
 * @param {string} objectName - object key
 * @param {string} uploadId - multipart upload id
 * @return {string} the summary key
 */
azureMpuUtils.getMpuSummaryKey = (objectName, uploadId) =>
    `${objectName}${azureMpuUtils.splitter}${uploadId}`;

/**
 * Build the Azure block id for one subpart of a part, padded to a fixed
 * width so all block ids have equal length.
 * @param {string} uploadId - multipart upload id
 * @param {number} partNumber - part number
 * @param {number} subPartIndex - subpart index within the part
 * @return {string} the fixed-width block id
 */
azureMpuUtils.getBlockId = (uploadId, partNumber, subPartIndex) => {
    const paddedPartNumber = azureMpuUtils.padString(partNumber, 'partNumber');
    const paddedSubPart = azureMpuUtils.padString(subPartIndex, 'subPart');
    const splitter = azureMpuUtils.splitter;
    const blockId = `${uploadId}${splitter}partNumber${paddedPartNumber}` +
        `${splitter}subPart${paddedSubPart}${splitter}`;
    return azureMpuUtils.padString(blockId, 'part');
};

/**
 * Build the summary id recorded for a completed part.
 * @param {number} partNumber - part number
 * @param {string} eTag - part ETag
 * @param {number} size - part size in bytes
 * @return {string} the fixed-width summary part id
 */
azureMpuUtils.getSummaryPartId = (partNumber, eTag, size) => {
    const paddedPartNumber = azureMpuUtils.padString(partNumber, 'partNumber');
    const timestamp = Date.now();
    const splitter = azureMpuUtils.splitter;
    const summaryKey = `${paddedPartNumber}${splitter}${timestamp}` +
        `${splitter}${eTag}${splitter}${size}${splitter}`;
    return azureMpuUtils.padString(summaryKey, 'part');
};

/**
 * Compute how a part of `dataContentLength` bytes splits into subparts of
 * at most maxSubPartSize bytes.
 * @param {number} dataContentLength - part length in bytes
 * @return {object} { lastPartIndex, lastPartSize }
 */
azureMpuUtils.getSubPartInfo = dataContentLength => {
    const numberFullSubParts =
        Math.floor(dataContentLength / azureMpuUtils.maxSubPartSize);
    const remainder = dataContentLength % azureMpuUtils.maxSubPartSize;
    const numberSubParts = remainder ?
        numberFullSubParts + 1 : numberFullSubParts;
    // A zero remainder means the final subpart is a full-size one.
    const lastPartSize = remainder || azureMpuUtils.maxSubPartSize;
    return {
        lastPartIndex: numberSubParts - 1,
        lastPartSize,
    };
};

/**
 * Size of a given subpart: the full subpart size except possibly for the
 * last one.
 * @param {object} subPartInfo - result of getSubPartInfo
 * @param {number} subPartIndex - subpart index
 * @return {number} subpart size in bytes
 */
azureMpuUtils.getSubPartSize = (subPartInfo, subPartIndex) => {
    const { lastPartIndex, lastPartSize } = subPartInfo;
    return subPartIndex === lastPartIndex ?
        lastPartSize : azureMpuUtils.maxSubPartSize;
};

/**
 * Block ids for every subpart of a part, in subpart order.
 * @param {object} part - part record with numberSubParts and partNumber
 * @param {string} uploadId - multipart upload id
 * @return {string[]} block ids
 */
azureMpuUtils.getSubPartIds = (part, uploadId) =>
    [...Array(part.numberSubParts).keys()].map(subPartIndex =>
        azureMpuUtils.getBlockId(uploadId, part.partNumber, subPartIndex));
/**
 * Upload a part that fits in a single Azure block.
 * @param {function} errorWrapperFn - wrapper invoking the Azure SDK call
 * (exact contract defined by the caller — see data backend)
 * @param {stream.Readable} request - stream of the part's data
 * @param {object} params - part parameters (bucketName, partNumber, size,
 * objectKey, contentMD5, uploadId)
 * @param {string} dataStoreName - name of the data backend, for logging
 * @param {object} log - logger
 * @param {function} cb - callback(err, eTag, totalSubParts, size)
 * @return {*} the value returned by errorWrapperFn
 */
azureMpuUtils.putSinglePart = (errorWrapperFn, request, params, dataStoreName,
log, cb) => {
    const { bucketName, partNumber, size, objectKey, contentMD5, uploadId }
        = params;
    const totalSubParts = 1;
    const blockId = azureMpuUtils.getBlockId(uploadId, partNumber, 0);
    const passThrough = new stream.PassThrough();
    const options = {};
    if (contentMD5) {
        // Ask Azure to verify the uploaded data against the provided MD5.
        options.useTransactionalMD5 = true;
        options.transactionalContentMD5 = contentMD5;
    }
    request.pipe(passThrough);
    return errorWrapperFn('uploadPart', 'createBlockFromStream',
        [blockId, bucketName, objectKey, passThrough, size, options,
        (err, result) => {
            if (err) {
                log.error('Error from Azure data backend uploadPart',
                    { error: err.message, dataStoreName });
                // Map Azure error codes onto the equivalent S3 errors.
                if (err.code === 'ContainerNotFound') {
                    return cb(errors.NoSuchBucket);
                }
                if (err.code === 'InvalidMd5') {
                    return cb(errors.InvalidDigest);
                }
                if (err.code === 'Md5Mismatch') {
                    return cb(errors.BadDigest);
                }
                return cb(errors.InternalError.customizeDescription(
                    `Error returned from Azure: ${err.message}`)
                );
            }
            // Azure returns a base64 content-md5; S3 ETags are hex.
            const eTag = objectUtils.getHexMD5(result.headers['content-md5']);
            return cb(null, eTag, totalSubParts, size);
        }], log, cb);
};
/**
 * Start uploading one subpart's stream as an Azure block, registering the
 * in-flight operation with the results collector.
 * @param {function} errorWrapperFn - wrapper invoking the Azure SDK call
 * @param {object} partParams - uploadId, partNumber, bucketName, objectKey
 * @param {object} subPartInfo - result of getSubPartInfo for the part
 * @param {stream.Readable} subPartStream - stream of this subpart's data
 * @param {number} subPartIndex - index of the subpart
 * @param {ResultsCollector} resultsCollector - collector receiving this
 * upload's outcome
 * @param {object} log - logger
 * @param {function} cb - callback forwarded to errorWrapperFn
 * @return {undefined}
 */
azureMpuUtils.putNextSubPart = (errorWrapperFn, partParams, subPartInfo,
subPartStream, subPartIndex, resultsCollector, log, cb) => {
    const { uploadId, partNumber, bucketName, objectKey } = partParams;
    const subPartSize = azureMpuUtils.getSubPartSize(
        subPartInfo, subPartIndex);
    const subPartId = azureMpuUtils.getBlockId(uploadId, partNumber,
        subPartIndex);
    // Register before starting so the 'done' event cannot fire early.
    resultsCollector.pushOp();
    errorWrapperFn('uploadPart', 'createBlockFromStream',
        [subPartId, bucketName, objectKey, subPartStream, subPartSize,
        {}, err => resultsCollector.pushResult(err, subPartIndex)], log, cb);
};
/**
 * Upload a part larger than maxSubPartSize as multiple Azure blocks,
 * splitting the incoming stream at subpart boundaries while uploading
 * subparts concurrently with reading the request.
 * @param {function} errorWrapperFn - wrapper invoking the Azure SDK call
 * @param {stream.Readable} request - stream of the part's data
 * @param {object} params - part parameters (uploadId, partNumber,
 * bucketName, objectKey, size)
 * @param {string} dataStoreName - name of the data backend, for logging
 * @param {object} log - logger
 * @param {function} cb - callback(err, eTag, numberSubParts, totalLength)
 * @return {undefined}
 */
azureMpuUtils.putSubParts = (errorWrapperFn, request, params,
dataStoreName, log, cb) => {
    const subPartInfo = azureMpuUtils.getSubPartInfo(params.size);
    const resultsCollector = new ResultsCollector();
    const hashedStream = new MD5Sum();
    const streamInterface = new SubStreamInterface(hashedStream);
    log.trace('data length is greater than max subpart size;' +
        'putting multiple parts');
    resultsCollector.on('error', (err, subPartIndex) => {
        // A subpart failed while streaming was in progress: abort the
        // stream and report the mapped error.
        streamInterface.stopStreaming(request);
        log.error(`Error putting subpart to Azure: ${subPartIndex}`,
            { error: err.message, dataStoreName });
        if (err.code === 'ContainerNotFound') {
            return cb(errors.NoSuchBucket);
        }
        return cb(errors.InternalError.customizeDescription(
            `Error returned from Azure: ${err}`));
    });
    resultsCollector.on('done', (err, results) => {
        if (err) {
            log.error('Error putting last subpart to Azure',
                { error: err.message, dataStoreName });
            if (err.code === 'ContainerNotFound') {
                return cb(errors.NoSuchBucket);
            }
            return cb(errors.InternalError.customizeDescription(
                `Error returned from Azure: ${err}`));
        }
        const numberSubParts = results.length;
        const totalLength = streamInterface.getTotalBytesStreamed();
        log.trace('successfully put subparts to Azure',
            { numberSubParts, totalLength });
        hashedStream.on('hashed', () => cb(null, hashedStream.completedHash,
            numberSubParts, totalLength));
        // in case the hashed event was already emitted before the
        // event handler was registered:
        if (hashedStream.completedHash) {
            hashedStream.removeAllListeners('hashed');
            return cb(null, hashedStream.completedHash, numberSubParts,
                totalLength);
        }
        return undefined;
    });
    const currentStream = streamInterface.getCurrentStream();
    // start first put to Azure before we start streaming the data
    azureMpuUtils.putNextSubPart(errorWrapperFn, params, subPartInfo,
        currentStream, 0, resultsCollector, log, cb);
    request.pipe(hashedStream);
    hashedStream.on('end', () => {
        resultsCollector.enableComplete();
        streamInterface.endStreaming();
    });
    hashedStream.on('data', data => {
        const currentLength = streamInterface.getLengthCounter();
        if (currentLength + data.length > azureMpuUtils.maxSubPartSize) {
            // This chunk straddles a subpart boundary: write up to the
            // boundary, roll over to a fresh subpart stream, then write
            // the remainder there.
            const bytesToMaxSize = azureMpuUtils.maxSubPartSize - currentLength;
            const firstChunk = bytesToMaxSize === 0 ? data :
                data.slice(bytesToMaxSize);
            if (bytesToMaxSize !== 0) {
                // if we have not streamed full subpart, write enough of the
                // data chunk to stream the correct length
                streamInterface.write(data.slice(0, bytesToMaxSize));
            }
            const { nextStream, subPartIndex } =
                streamInterface.transitionToNextStream();
            azureMpuUtils.putNextSubPart(errorWrapperFn, params, subPartInfo,
                nextStream, subPartIndex, resultsCollector, log, cb);
            streamInterface.write(firstChunk);
        } else {
            streamInterface.write(data);
        }
    });
};
module.exports = azureMpuUtils;

View File

@ -1,107 +0,0 @@
const querystring = require('querystring');
const escapeForXml = require('./escapeForXml');
// Map of API method name -> function building its XML response body.
const convertMethods = {};

/**
 * Build the CompleteMultipartUpload XML response.
 * @param {object} xmlParams - bucketName, hostname, objectKey and eTag
 * @return {string} the XML document
 */
convertMethods.completeMultipartUpload = xmlParams => {
    const escapedBucketName = escapeForXml(xmlParams.bucketName);
    return '<?xml version="1.0" encoding="UTF-8"?>' +
    '<CompleteMultipartUploadResult ' +
    'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
    `<Location>http://${escapedBucketName}.` +
    `${escapeForXml(xmlParams.hostname)}/` +
    `${escapeForXml(xmlParams.objectKey)}</Location>` +
    `<Bucket>${escapedBucketName}</Bucket>` +
    `<Key>${escapeForXml(xmlParams.objectKey)}</Key>` +
    `<ETag>${escapeForXml(xmlParams.eTag)}</ETag>` +
    '</CompleteMultipartUploadResult>';
};
/**
 * Build the InitiateMultipartUpload XML response.
 * @param {object} xmlParams - bucketName, objectKey and uploadId
 * @return {string} the XML document
 */
convertMethods.initiateMultipartUpload = xmlParams =>
    '<?xml version="1.0" encoding="UTF-8"?>' +
    '<InitiateMultipartUploadResult ' +
    'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
    `<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>` +
    `<Key>${escapeForXml(xmlParams.objectKey)}</Key>` +
    `<UploadId>${escapeForXml(xmlParams.uploadId)}</UploadId>` +
    '</InitiateMultipartUploadResult>';
/**
 * Build the ListMultipartUploads XML response from a listing result.
 * @param {object} xmlParams - bucketName, keyMarker, uploadIdMarker,
 * prefix, encoding and the listing result under `list`
 * @return {string} the XML document
 */
convertMethods.listMultipartUploads = xmlParams => {
    const xml = [];
    const l = xmlParams.list;
    xml.push('<?xml version="1.0" encoding="UTF-8"?>',
        '<ListMultipartUploadsResult ' +
            'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">',
        `<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>`
    );
    // For certain XML elements, if it is `undefined`, AWS returns either an
    // empty tag or does not include it. Hence the `optional` key in the params.
    const params = [
        { tag: 'KeyMarker', value: xmlParams.keyMarker },
        { tag: 'UploadIdMarker', value: xmlParams.uploadIdMarker },
        { tag: 'NextKeyMarker', value: l.NextKeyMarker, optional: true },
        { tag: 'NextUploadIdMarker', value: l.NextUploadIdMarker,
            optional: true },
        { tag: 'Delimiter', value: l.Delimiter, optional: true },
        { tag: 'Prefix', value: xmlParams.prefix, optional: true },
    ];
    params.forEach(param => {
        if (param.value) {
            xml.push(`<${param.tag}>${escapeForXml(param.value)}` +
                `</${param.tag}>`);
        } else if (!param.optional) {
            // Required-but-absent elements are emitted as empty tags.
            xml.push(`<${param.tag} />`);
        }
    });
    xml.push(`<MaxUploads>${escapeForXml(l.MaxKeys)}</MaxUploads>`,
        `<IsTruncated>${escapeForXml(l.IsTruncated)}</IsTruncated>`
    );
    l.Uploads.forEach(upload => {
        const val = upload.value;
        let key = upload.key;
        if (xmlParams.encoding === 'url') {
            // Caller asked for url-encoded keys in the response.
            key = querystring.escape(key);
        }
        xml.push('<Upload>',
            `<Key>${escapeForXml(key)}</Key>`,
            `<UploadId>${escapeForXml(val.UploadId)}</UploadId>`,
            '<Initiator>',
            `<ID>${escapeForXml(val.Initiator.ID)}</ID>`,
            `<DisplayName>${escapeForXml(val.Initiator.DisplayName)}` +
                '</DisplayName>',
            '</Initiator>',
            '<Owner>',
            `<ID>${escapeForXml(val.Owner.ID)}</ID>`,
            `<DisplayName>${escapeForXml(val.Owner.DisplayName)}` +
                '</DisplayName>',
            '</Owner>',
            `<StorageClass>${escapeForXml(val.StorageClass)}` +
                '</StorageClass>',
            `<Initiated>${escapeForXml(val.Initiated)}</Initiated>`,
            '</Upload>'
        );
    });
    l.CommonPrefixes.forEach(prefix => {
        xml.push('<CommonPrefixes>',
            `<Prefix>${escapeForXml(prefix)}</Prefix>`,
            '</CommonPrefixes>'
        );
    });
    xml.push('</ListMultipartUploadsResult>');
    return xml.join('');
};
/**
 * Dispatch to the XML builder registered for an API method.
 * @param {string} method - name of a key in convertMethods
 * @param {object} xmlParams - parameters forwarded to the builder
 * @return {string} the XML response body
 */
function convertToXml(method, xmlParams) {
    const buildXml = convertMethods[method];
    return buildXml(xmlParams);
}
module.exports = convertToXml;

View File

@ -1,9 +0,0 @@
// Helpers for converting MD5 digests between base64 and hex encodings.
const objectUtils = {
    /**
     * Convert a base64-encoded MD5 digest to hex.
     * @param {string} base64MD5 - base64-encoded digest
     * @return {string} hex-encoded digest
     */
    getHexMD5(base64MD5) {
        return Buffer.from(base64MD5, 'base64').toString('hex');
    },

    /**
     * Convert a hex-encoded MD5 digest to base64.
     * @param {string} hexMD5 - hex-encoded digest
     * @return {string} base64-encoded digest
     */
    getBase64MD5(hexMD5) {
        return Buffer.from(hexMD5, 'hex').toString('base64');
    },
};
module.exports = objectUtils;

View File

@ -186,7 +186,7 @@ function parseTagFromQuery(tagQuery) {
for (let i = 0; i < pairs.length; i++) { for (let i = 0; i < pairs.length; i++) {
const pair = pairs[i]; const pair = pairs[i];
if (!pair) { if (!pair) {
emptyTag++; emptyTag ++;
continue; continue;
} }
const pairArray = pair.split('='); const pairArray = pair.split('=');

View File

@ -45,7 +45,7 @@ function _checkModifiedSince(ifModifiedSinceTime, lastModified) {
if (ifModifiedSinceTime) { if (ifModifiedSinceTime) {
res.present = true; res.present = true;
const checkWith = (new Date(ifModifiedSinceTime)).getTime(); const checkWith = (new Date(ifModifiedSinceTime)).getTime();
if (Number.isNaN(Number(checkWith))) { if (isNaN(checkWith)) {
res.error = errors.InvalidArgument; res.error = errors.InvalidArgument;
} else if (lastModified <= checkWith) { } else if (lastModified <= checkWith) {
res.error = errors.NotModified; res.error = errors.NotModified;
@ -59,7 +59,7 @@ function _checkUnmodifiedSince(ifUnmodifiedSinceTime, lastModified) {
if (ifUnmodifiedSinceTime) { if (ifUnmodifiedSinceTime) {
res.present = true; res.present = true;
const checkWith = (new Date(ifUnmodifiedSinceTime)).getTime(); const checkWith = (new Date(ifUnmodifiedSinceTime)).getTime();
if (Number.isNaN(Number(checkWith))) { if (isNaN(checkWith)) {
res.error = errors.InvalidArgument; res.error = errors.InvalidArgument;
} else if (lastModified > checkWith) { } else if (lastModified > checkWith) {
res.error = errors.PreconditionFailed; res.error = errors.PreconditionFailed;

View File

@ -35,12 +35,12 @@ function checkUnsupportedRoutes(reqMethod) {
function checkBucketAndKey(bucketName, objectKey, method, reqQuery, function checkBucketAndKey(bucketName, objectKey, method, reqQuery,
blacklistedPrefixes, log) { blacklistedPrefixes, log) {
// bucketName should also be undefined, but is checked below anyway // if empty name and request not a list Buckets
const getServiceCall = (method === 'GET' && !objectKey); if (!bucketName && !(method === 'GET' && !objectKey)) {
// if empty name and request not a list Buckets or preflight request
if (!bucketName && !(getServiceCall || method === 'OPTIONS')) {
log.debug('empty bucket name', { method: 'routes' }); log.debug('empty bucket name', { method: 'routes' });
return errors.MethodNotAllowed; return (method !== 'OPTIONS') ?
errors.MethodNotAllowed : errors.AccessForbidden
.customizeDescription('CORSResponse: Bucket not found');
} }
if (bucketName !== undefined && routesUtils.isValidBucketName(bucketName, if (bucketName !== undefined && routesUtils.isValidBucketName(bucketName,
blacklistedPrefixes.bucket) === false) { blacklistedPrefixes.bucket) === false) {
@ -186,7 +186,7 @@ function routes(req, res, params, logger) {
if (statsClient) { if (statsClient) {
// report new request for stats // report new request for stats
statsClient.reportNewRequest('s3'); statsClient.reportNewRequest();
} }
try { try {

View File

@ -16,7 +16,8 @@ function routeDELETE(request, response, api, log, statsClient) {
return routesUtils.responseNoBody(err, corsHeaders, response, return routesUtils.responseNoBody(err, corsHeaders, response,
204, log); 204, log);
}); });
} else if (request.objectKey === undefined) { } else {
if (request.objectKey === undefined) {
if (request.query.website !== undefined) { if (request.query.website !== undefined) {
return api.callApiMethod('bucketDeleteWebsite', request, return api.callApiMethod('bucketDeleteWebsite', request,
response, log, (err, corsHeaders) => { response, log, (err, corsHeaders) => {
@ -70,6 +71,7 @@ function routeDELETE(request, response, api, log, statsClient) {
204, log); 204, log);
}); });
} }
}
return undefined; return undefined;
} }

View File

@ -8,11 +8,9 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
} else if (request.bucketName === undefined } else if (request.bucketName === undefined
&& request.objectKey === undefined) { && request.objectKey === undefined) {
// GET service // GET service
api.callApiMethod('serviceGet', request, response, log, api.callApiMethod('serviceGet', request, response, log, (err, xml) => {
(err, xml, corsHeaders) => {
routesUtils.statsReport500(err, statsClient); routesUtils.statsReport500(err, statsClient);
return routesUtils.responseXMLBody(err, xml, response, log, return routesUtils.responseXMLBody(err, xml, response, log);
corsHeaders);
}); });
} else if (request.objectKey === undefined) { } else if (request.objectKey === undefined) {
// GET bucket ACL // GET bucket ACL
@ -76,7 +74,6 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
}); });
} }
} else { } else {
/* eslint-disable no-lonely-if */
if (request.query.acl !== undefined) { if (request.query.acl !== undefined) {
// GET object ACL // GET object ACL
api.callApiMethod('objectGetACL', request, response, log, api.callApiMethod('objectGetACL', request, response, log,
@ -116,7 +113,6 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
range, log); range, log);
}); });
} }
/* eslint-enable */
} }
} }

View File

@ -10,8 +10,8 @@ function routePUT(request, response, api, log, statsClient) {
// content-length for object is handled separately below // content-length for object is handled separately below
const contentLength = request.headers['content-length']; const contentLength = request.headers['content-length'];
if ((contentLength && (Number.isNaN(Number(contentLength)) if ((contentLength && (isNaN(contentLength) || contentLength < 0)) ||
|| contentLength < 0)) || contentLength === '') { contentLength === '') {
log.debug('invalid content-length header'); log.debug('invalid content-length header');
return routesUtils.responseNoBody( return routesUtils.responseNoBody(
errors.BadRequest, null, response, null, log); errors.BadRequest, null, response, null, log);

View File

@ -310,8 +310,7 @@ function _contentLengthMatchesLocations(contentLength, dataLocations) {
(sum, location) => (sum !== undefined && location.size ? (sum, location) => (sum !== undefined && location.size ?
sum + Number.parseInt(location.size, 10) : sum + Number.parseInt(location.size, 10) :
undefined), 0); undefined), 0);
return sumSizes === undefined || return sumSizes === undefined || sumSizes === contentLength;
sumSizes === Number.parseInt(contentLength, 10);
} }
const routesUtils = { const routesUtils = {
@ -831,7 +830,6 @@ const routesUtils = {
*/ */
isValidBucketName(bucketname, prefixBlacklist) { isValidBucketName(bucketname, prefixBlacklist) {
const ipAddressRegex = new RegExp(/^(\d+\.){3}\d+$/); const ipAddressRegex = new RegExp(/^(\d+\.){3}\d+$/);
// eslint-disable-next-line no-useless-escape
const dnsRegex = new RegExp(/^[a-z0-9]+([\.\-]{1}[a-z0-9]+)*$/); const dnsRegex = new RegExp(/^[a-z0-9]+([\.\-]{1}[a-z0-9]+)*$/);
// Must be at least 3 and no more than 63 characters long. // Must be at least 3 and no more than 63 characters long.
if (bucketname.length < 3 || bucketname.length > 63) { if (bucketname.length < 3 || bucketname.length > 63) {
@ -880,7 +878,7 @@ const routesUtils = {
*/ */
statsReport500(err, statsClient) { statsReport500(err, statsClient) {
if (statsClient && err && err.code === 500) { if (statsClient && err && err.code === 500) {
statsClient.report500('s3'); statsClient.report500();
} }
return undefined; return undefined;
}, },

View File

@ -91,7 +91,7 @@ class MetadataFileClient {
return done(err); return done(err);
} }
this.logger.info('connected to record log service', { url }); this.logger.info('connected to record log service', { url });
return done(null, logProxy); return done();
}); });
return logProxy; return logProxy;
} }

View File

@ -147,7 +147,7 @@ class MetadataFileServer {
} }
this.servers.forEach(server => { this.servers.forEach(server => {
server.registerServices(...this.services); server.registerServices.apply(server, this.services);
}); });
this.genUUIDIfNotExists(); this.genUUIDIfNotExists();

View File

@ -179,7 +179,6 @@ class TestMatrix {
this.listOfSpecialCase.forEach(specialCase => { this.listOfSpecialCase.forEach(specialCase => {
const keyCase = specialCase.key; const keyCase = specialCase.key;
const result = Object.keys(keyCase).every(currentKey => { const result = Object.keys(keyCase).every(currentKey => {
// eslint-disable-next-line no-prototype-builtins
if (this.params.hasOwnProperty(currentKey) === false) { if (this.params.hasOwnProperty(currentKey) === false) {
return false; return false;
} }

View File

@ -185,7 +185,7 @@ class Version {
} }
function isMasterKey(key) { function isMasterKey(key) {
return !key.includes(VID_SEP); return ! key.includes(VID_SEP);
} }

View File

@ -80,7 +80,7 @@ class VersioningRequestProcessor {
return callback(err); return callback(err);
} }
// answer if value is not a place holder for deletion // answer if value is not a place holder for deletion
if (!Version.isPHD(data)) { if (! Version.isPHD(data)) {
return callback(null, data); return callback(null, data);
} }
logger.debug('master version is a PHD, getting the latest version', logger.debug('master version is a PHD, getting the latest version',

View File

@ -3,7 +3,7 @@
"engines": { "engines": {
"node": "6.9.5" "node": "6.9.5"
}, },
"version": "7.2.0", "version": "7.0.1",
"description": "Common utilities for the S3 project components", "description": "Common utilities for the S3 project components",
"main": "index.js", "main": "index.js",
"repository": { "repository": {
@ -21,12 +21,9 @@
"async": "~2.1.5", "async": "~2.1.5",
"debug": "~2.3.3", "debug": "~2.3.3",
"diskusage": "^0.2.2", "diskusage": "^0.2.2",
"ioredis": "2.4.0",
"ipaddr.js": "1.2.0", "ipaddr.js": "1.2.0",
"joi": "^10.6",
"level": "~1.6.0", "level": "~1.6.0",
"level-sublevel": "~6.6.1", "level-sublevel": "~6.6.1",
"simple-glob": "^0.1",
"socket.io": "~1.7.3", "socket.io": "~1.7.3",
"socket.io-client": "~1.7.3", "socket.io-client": "~1.7.3",
"utf8": "2.1.2", "utf8": "2.1.2",

View File

@ -72,20 +72,4 @@ describe('AuthInfo class constructor', () => {
const publicUser = new AuthInfo({ canonicalID: constants.publicId }); const publicUser = new AuthInfo({ canonicalID: constants.publicId });
assert.strictEqual(publicUser.isRequesterPublicUser(), true); assert.strictEqual(publicUser.isRequesterPublicUser(), true);
}); });
it('should have a working isRequesterAServiceAccount() method', () => {
assert.strictEqual(authInfo.isRequesterAServiceAccount(), false);
const serviceAccount = new AuthInfo({
canonicalID: `${constants.zenkoServiceAccount}/clueso` });
assert.strictEqual(serviceAccount.isRequesterAServiceAccount(), true);
});
it('should have a working isRequesterThisServiceAccount() method', () => {
const serviceAccount = new AuthInfo({
canonicalID: `${constants.zenkoServiceAccount}/clueso` });
assert.strictEqual(
serviceAccount.isRequesterThisServiceAccount('backbeat'), false);
assert.strictEqual(
serviceAccount.isRequesterThisServiceAccount('clueso'), true);
});
}); });

View File

@ -0,0 +1,47 @@
const assert = require('assert');
const Backend = require('../../../../lib/auth/auth').inMemory.backend.s3;
const DummyRequestLogger = require('../../helpers').DummyRequestLogger;
const authData = require('./sample_authdata');
const backend = new Backend(JSON.parse(JSON.stringify(authData)));
const counter = 10;
// eslint-disable-next-line arrow-body-style
const specificResource = [...Array(counter).keys()].map(i => {
return {
key: `key${i}`,
};
});
const generalResource = 'bucketName';
const requestContexts = {
constantParams: {
generalResource,
},
parameterize: {
specificResource,
},
};
const service = 's3';
const userArn = 'aws::iam:123456789012:root';
const log = new DummyRequestLogger();
// eslint-disable-next-line arrow-body-style
const expectedResults = specificResource.map(entry => {
return {
isAllowed: true,
arn: `arn:aws:${service}:::${generalResource}/${entry.key}`,
versionId: undefined,
};
});
describe('S3AuthBackend.checkPolicies', () => {
it(' should mock successful results', done => {
backend.checkPolicies(requestContexts, userArn, log,
(err, vaultReturnObject) => {
assert.strictEqual(err, null, `Unexpected err: ${err}`);
assert.deepStrictEqual(vaultReturnObject, {
message: { body: expectedResults },
});
return done();
});
});
});

View File

@ -2,7 +2,7 @@ const assert = require('assert');
const Indexer = require('../../../../lib/auth/in_memory/Indexer'); const Indexer = require('../../../../lib/auth/in_memory/Indexer');
const ref = require('./sample_authdata.json'); const ref = require('./sample_authdata.json');
const { should } = require('./AuthLoader.spec'); const { should } = require('./validateAuthConfig');
describe('S3 AuthData Indexer', () => { describe('S3 AuthData Indexer', () => {
let obj = {}; let obj = {};
@ -28,6 +28,15 @@ describe('S3 AuthData Indexer', () => {
done(); done();
}); });
it('Should return user from email', done => {
const res = index.getEntityByEmail(obj.accounts[0].users[0].email);
assert.strictEqual(typeof res, 'object');
assert.strictEqual(res.arn, obj.accounts[0].arn);
assert.strictEqual(res.IAMdisplayName,
obj.accounts[0].users[0].name);
done();
});
it('Should return account from key', done => { it('Should return account from key', done => {
const res = index.getEntityByKey(obj.accounts[0].keys[0].access); const res = index.getEntityByKey(obj.accounts[0].keys[0].access);
assert.strictEqual(typeof res, 'object'); assert.strictEqual(typeof res, 'object');
@ -35,6 +44,16 @@ describe('S3 AuthData Indexer', () => {
done(); done();
}); });
it('Should return user from key', done => {
const res = index.getEntityByKey(obj.accounts[0].users[0].keys[0]
.access);
assert.strictEqual(typeof res, 'object');
assert.strictEqual(res.arn, obj.accounts[0].arn);
assert.strictEqual(res.IAMdisplayName,
obj.accounts[0].users[0].name);
done();
});
it('should index account without keys', done => { it('should index account without keys', done => {
should._exec = () => { should._exec = () => {
index = new Indexer(obj); index = new Indexer(obj);

View File

@ -2,22 +2,44 @@
"accounts": [{ "accounts": [{
"name": "Bart", "name": "Bart",
"email": "sampleaccount1@sampling.com", "email": "sampleaccount1@sampling.com",
"arn": "arn:aws:iam::123456789012:root", "arn": "aws::iam:123456789012:root",
"canonicalID": "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be", "canonicalID": "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be",
"shortid": "123456789012", "shortid": "123456789012",
"keys": [{ "keys": [{
"access": "accessKey1", "access": "accessKey1",
"secret": "verySecretKey1" "secret": "verySecretKey1"
}],
"users": [{
"name": "Bart Jr",
"email": "user1.sampleaccount2@sampling.com",
"arn": "aws::iam:123456789013:bart",
"keys": [{
"access": "USERBARTFUNACCESSKEY",
"secret": "verySecretKey1"
}] }]
}],
"sasToken": "test0"
}, { }, {
"name": "Lisa", "name": "Lisa",
"email": "sampleaccount2@sampling.com", "email": "sampleaccount2@sampling.com",
"arn": "arn:aws:iam::123456789013:root", "arn": "aws::iam:accessKey2:user/Lisa",
"canonicalID": "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2bf", "canonicalID": "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2bf",
"shortid": "123456789013", "shortid": "123456789012",
"keys": [{ "keys": [{
"access": "accessKey2", "access": "accessKey2",
"secret": "verySecretKey2" "secret": "verySecretKey2"
}] }],
"sasToken": "test1"
}, {
"name": "Docker",
"email": "sampleaccount3@sampling.com",
"arn": "aws::iam:accessKeyDocker:user/Docker",
"canonicalID": "sd359df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47eh3hd",
"shortid": "123456789012",
"keys": [{
"access": "accessKeyDocker",
"secret": "verySecretKeyDocker"
}],
"sasToken": "test2"
}] }]
} }

View File

@ -2,12 +2,13 @@
"accounts": [{ "accounts": [{
"name": "Zenko", "name": "Zenko",
"email": "sampleaccount4@sampling.com", "email": "sampleaccount4@sampling.com",
"arn": "aws::iam:123456789015:root", "arn": "aws::iam:accessKeyZenko:user/Zenko",
"canonicalID": "newCanId", "canonicalID": "newCanId",
"shortid": "123456789015", "shortid": "123456789012",
"keys": [{ "keys": [{
"access": "accessKeyZenko", "access": "accessKeyZenko",
"secret": "verySecretKeyZenko" "secret": "verySecretKeyZenko"
}] }],
"sasToken": "test2"
}] }]
} }

View File

@ -1,7 +1,8 @@
const assert = require('assert'); const assert = require('assert');
const werelogs = require('werelogs'); const werelogs = require('werelogs');
const AuthLoader = require('../../../../lib/auth/auth').inMemory.AuthLoader; const validateAuthConfig
= require('../../../../lib/auth/auth').inMemory.validateAuthConfig;
const ref = require('./sample_authdata.json'); const ref = require('./sample_authdata.json');
werelogs.configure({ werelogs.configure({
@ -15,7 +16,7 @@ function getParentField(obj, field) {
for (let i = 0; i < fields.length - 1; ++i) { for (let i = 0; i < fields.length - 1; ++i) {
const cur = fields[i]; const cur = fields[i];
const n = Number(cur, 10); const n = Number(cur, 10);
if (Number.isNaN(n)) { if (isNaN(n)) {
parent = parent[cur]; parent = parent[cur];
} else { } else {
parent = parent[n]; parent = parent[n];
@ -28,19 +29,15 @@ function getFieldName(field) {
return field.split('.').pop(); return field.split('.').pop();
} }
function shouldFail(obj, done) { function shouldFail(obj, checkSas, done) {
const authLoader = new AuthLoader(werelogs); const res = validateAuthConfig(obj, werelogs, checkSas);
authLoader.addAccounts(obj); assert.strictEqual(res, true);
const res = authLoader.validate();
assert.strictEqual(res, false);
done(); done();
} }
function shouldSucceed(obj, done) { function shouldSuccess(obj, checkSas, done) {
const authLoader = new AuthLoader(werelogs); const res = validateAuthConfig(obj, werelogs, checkSas);
authLoader.addAccounts(obj); assert.strictEqual(res, false);
const res = authLoader.validate();
assert.strictEqual(res, true);
done(); done();
} }
@ -48,15 +45,15 @@ const should = {
_exec: undefined, _exec: undefined,
missingField: (obj, field, done) => { missingField: (obj, field, done) => {
delete getParentField(obj, field)[getFieldName(field)]; delete getParentField(obj, field)[getFieldName(field)];
should._exec(obj, done); should._exec(obj, true, done);
}, },
modifiedField: (obj, field, value, done) => { modifiedField: (obj, field, value, done) => {
getParentField(obj, field)[getFieldName(field)] = value; getParentField(obj, field)[getFieldName(field)] = value;
should._exec(obj, done); should._exec(obj, true, done);
}, },
}; };
describe('AuthLoader class', () => { describe('S3 AuthData Checker', () => {
let obj = {}; let obj = {};
beforeEach(done => { beforeEach(done => {
@ -74,10 +71,18 @@ describe('AuthLoader class', () => {
['accounts.0.email', 64], ['accounts.0.email', 64],
['accounts.0.arn', undefined], ['accounts.0.arn', undefined],
['accounts.0.arn', 64], ['accounts.0.arn', 64],
['accounts.0.sasToken', undefined],
['accounts.0.sasToken', 64],
['accounts.0.canonicalID', undefined], ['accounts.0.canonicalID', undefined],
['accounts.0.canonicalID', 64], ['accounts.0.canonicalID', 64],
['accounts.0.users', 'not an object'],
['accounts.0.users.0.arn', undefined],
['accounts.0.users.0.arn', 64],
['accounts.0.users.0.email', undefined],
['accounts.0.users.0.email', 64],
['accounts.0.users.0.keys', undefined],
['accounts.0.users.0.keys', 'not an Array'],
['accounts.0.keys', 'not an Array'], ['accounts.0.keys', 'not an Array'],
['accounts.0.keys', undefined],
].forEach(test => { ].forEach(test => {
if (test[1] === undefined) { if (test[1] === undefined) {
// Check a failure when deleting required fields // Check a failure when deleting required fields
@ -88,8 +93,7 @@ describe('AuthLoader class', () => {
} else { } else {
// Check a failure when the type of field is different than // Check a failure when the type of field is different than
// expected // expected
it(`should fail when modified field ${test[0]} ${test[1]}`, it(`should fail when modified field ${test[0]}${test[1]}`, done => {
done => {
should._exec = shouldFail; should._exec = shouldFail;
should.modifiedField(obj, test[0], test[1], done); should.modifiedField(obj, test[0], test[1], done);
}); });
@ -105,30 +109,52 @@ describe('AuthLoader class', () => {
'accounts.0.users', 'accounts.0.users',
].forEach(test => { ].forEach(test => {
// Check a success when deleting optional fields // Check a success when deleting optional fields
it(`should return success when missing field ${test}`, done => { it(`should success when missing field ${test[0]}`, done => {
should._exec = shouldSucceed; should._exec = shouldSuccess;
should.missingField(obj, test[0], done); should.missingField(obj, test[0], done);
}); });
}); });
it('Should return error on two same canonicalID', done => { it('Should return success if no sasToken and checkSas false', done => {
obj.accounts[0].canonicalID = obj.accounts[1].canonicalID; obj.accounts[0].sasToken = undefined;
shouldFail(obj, done); shouldSuccess(obj, false, done);
}); });
it('Should return error on two same emails', done => { it('Should return error on two same sasTokens and checkSas true', done => {
obj.accounts[0].sasToken = obj.accounts[1].sasToken;
shouldFail(obj, true, done);
});
it('Should return success on two same sasTokens and checkSas false',
done => {
obj.accounts[0].sasToken = obj.accounts[1].sasToken;
shouldSuccess(obj, false, done);
});
it('Should return error on two same canonicalID', done => {
obj.accounts[0].canonicalID = obj.accounts[1].canonicalID;
shouldFail(obj, null, done);
});
it('Should return error on two same emails, account-account', done => {
obj.accounts[0].email = obj.accounts[1].email; obj.accounts[0].email = obj.accounts[1].email;
shouldFail(obj, done); shouldFail(obj, null, done);
});
it('Should return error on two same emails account-user', done => {
obj.accounts[0].users[0].email = obj.accounts[1].email;
shouldFail(obj, null, done);
}); });
it('Should return error on two same arn', done => { it('Should return error on two same arn', done => {
obj.accounts[0].arn = obj.accounts[1].arn; obj.accounts[0].arn = obj.accounts[0].users[0].arn;
shouldFail(obj, done); shouldFail(obj, null, done);
}); });
it('Should return error on two same access key', done => { it('Should return error on two same access key', done => {
obj.accounts[0].keys[0].access = obj.accounts[1].keys[0].access; obj.accounts[0].keys[0].access =
shouldFail(obj, done); obj.accounts[0].users[0].keys[0].access;
shouldFail(obj, null, done);
}); });
}); });

View File

@ -7,20 +7,16 @@ const constructStringToSign =
const DummyRequestLogger = require('../../helpers').DummyRequestLogger; const DummyRequestLogger = require('../../helpers').DummyRequestLogger;
const log = new DummyRequestLogger(); const log = new DummyRequestLogger();
[
{ path: '', desc: 'constructStringToSign function' }, describe('constructStringToSign function', () => {
{ path: '/_/proxy', desc: 'constructStringToSign function with proxy' },
].forEach(item => {
describe(item.desc, () => {
// Example taken from: http://docs.aws.amazon.com/AmazonS3/ // Example taken from: http://docs.aws.amazon.com/AmazonS3/
// latest/API/sig-v4-header-based-auth.html // latest/API/sig-v4-header-based-auth.html
it('should construct a stringToSign in accordance ' + it('should construct a stringToSign in accordance ' +
'with AWS rules for a get object request (header auth)', () => { 'with AWS rules for a get object request (header auth)', () => {
const path = '/test.txt';
const params = { const params = {
request: { request: {
method: 'GET', method: 'GET',
path: `${item.path}${path}`, path: '/test.txt',
headers: { headers: {
'host': 'examplebucket.s3.amazonaws.com', 'host': 'examplebucket.s3.amazonaws.com',
'x-amz-date': '20130524T000000Z', 'x-amz-date': '20130524T000000Z',
@ -42,13 +38,11 @@ const log = new DummyRequestLogger();
credentialScope: '20130524/us-east-1/s3/aws4_request', credentialScope: '20130524/us-east-1/s3/aws4_request',
timestamp: '20130524T000000Z', timestamp: '20130524T000000Z',
log, log,
proxyPath: item.path ? path : undefined,
}; };
const expectedOutput = 'AWS4-HMAC-SHA256\n' + const expectedOutput = 'AWS4-HMAC-SHA256\n' +
'20130524T000000Z\n' + '20130524T000000Z\n' +
'20130524/us-east-1/s3/aws4_request\n' + '20130524/us-east-1/s3/aws4_request\n' +
'7344ae5b7ee6c3e7e6b0fe0640412a37625d1fbfff95c48bbb2dc439649' + '7344ae5b7ee6c3e7e6b0fe0640412a37625d1fbfff95c48bbb2dc43964946972';
'46972';
const actualOutput = constructStringToSign(params); const actualOutput = constructStringToSign(params);
assert.strictEqual(actualOutput, expectedOutput); assert.strictEqual(actualOutput, expectedOutput);
}); });
@ -57,11 +51,10 @@ const log = new DummyRequestLogger();
// latest/API/sig-v4-header-based-auth.html // latest/API/sig-v4-header-based-auth.html
it('should construct a stringToSign in accordance ' + it('should construct a stringToSign in accordance ' +
'with AWS rules for a put object request (header auth)', () => { 'with AWS rules for a put object request (header auth)', () => {
const path = '/test$file.text';
const params = { const params = {
request: { request: {
method: 'PUT', method: 'PUT',
path: `${item.path}${path}`, path: '/test$file.text',
headers: { headers: {
'date': 'Fri, 24 May 2013 00:00:00 GMT', 'date': 'Fri, 24 May 2013 00:00:00 GMT',
'host': 'examplebucket.s3.amazonaws.com', 'host': 'examplebucket.s3.amazonaws.com',
@ -85,7 +78,6 @@ const log = new DummyRequestLogger();
credentialScope: '20130524/us-east-1/s3/aws4_request', credentialScope: '20130524/us-east-1/s3/aws4_request',
timestamp: '20130524T000000Z', timestamp: '20130524T000000Z',
log, log,
proxyPath: item.path ? path : undefined,
}; };
const expectedOutput = 'AWS4-HMAC-SHA256\n' + const expectedOutput = 'AWS4-HMAC-SHA256\n' +
'20130524T000000Z\n' + '20130524T000000Z\n' +
@ -99,13 +91,11 @@ const log = new DummyRequestLogger();
// Example taken from: http://docs.aws.amazon.com/AmazonS3/ // Example taken from: http://docs.aws.amazon.com/AmazonS3/
// latest/API/sig-v4-header-based-auth.html // latest/API/sig-v4-header-based-auth.html
it('should construct a stringToSign in accordance ' + it('should construct a stringToSign in accordance ' +
'with AWS rules for a pre-signed get url request (query auth)', 'with AWS rules for a pre-signed get url request (query auth)', () => {
() => {
const path = '/test.txt';
const params = { const params = {
request: { request: {
method: 'GET', method: 'GET',
path: `${item.path}${path}`, path: '/test.txt',
headers: { headers: {
host: 'examplebucket.s3.amazonaws.com', host: 'examplebucket.s3.amazonaws.com',
}, },
@ -123,7 +113,6 @@ const log = new DummyRequestLogger();
credentialScope: '20130524/us-east-1/s3/aws4_request', credentialScope: '20130524/us-east-1/s3/aws4_request',
timestamp: '20130524T000000Z', timestamp: '20130524T000000Z',
log, log,
proxyPath: item.path ? path : undefined,
}; };
const expectedOutput = 'AWS4-HMAC-SHA256\n' + const expectedOutput = 'AWS4-HMAC-SHA256\n' +
'20130524T000000Z\n' + '20130524T000000Z\n' +
@ -133,5 +122,4 @@ const log = new DummyRequestLogger();
const actualOutput = constructStringToSign(params); const actualOutput = constructStringToSign(params);
assert.strictEqual(actualOutput, expectedOutput); assert.strictEqual(actualOutput, expectedOutput);
}); });
});
}); });

View File

@ -1,78 +0,0 @@
'use strict'; // eslint-disable-line strict
const assert = require('assert');
const RedisClient = require('../../../lib/metrics/RedisClient');
const StatsClient = require('../../../lib/metrics/StatsClient');
// setup redis client
const config = {
host: '127.0.0.1',
port: 6379,
enableOfflineQueue: false,
};
const fakeLogger = {
trace: () => {},
error: () => {},
};
const redisClient = new RedisClient(config, fakeLogger);
// setup stats client
const STATS_INTERVAL = 5; // 5 seconds
const STATS_EXPIRY = 30; // 30 seconds
const statsClient = new StatsClient(redisClient, STATS_INTERVAL, STATS_EXPIRY);
describe('StatsClient class', () => {
const id = 'arsenal-test';
afterEach(() => redisClient.clear(() => {}));
it('should correctly record a new request', () => {
statsClient.reportNewRequest(id, (err, res) => {
assert.ifError(err);
assert(Array.isArray(res));
assert.equal(res.length, 2);
const expected = [[null, 1], [null, 1]];
assert.deepEqual(res, expected);
});
statsClient.reportNewRequest(id, (err, res) => {
assert.ifError(err);
assert(Array.isArray(res));
assert.equal(res.length, 2);
const expected = [[null, 2], [null, 1]];
assert.deepEqual(res, expected);
});
});
it('should correctly record a 500 on the server', () => {
statsClient.report500(id, (err, res) => {
assert.ifError(err);
assert(Array.isArray(res));
assert.equal(res.length, 2);
const expected = [[null, 1], [null, 1]];
assert.deepEqual(res, expected);
});
});
it('should respond back with total requests', () => {
statsClient.reportNewRequest(id, err => {
assert.ifError(err);
});
statsClient.report500(id, err => {
assert.ifError(err);
});
statsClient.getStats(fakeLogger, id, (err, res) => {
assert.ifError(err);
assert.equal(typeof res, 'object');
assert.equal(Object.keys(res).length, 3);
assert.equal(res.sampleDuration, STATS_EXPIRY);
const expected = { 'requests': 1, '500s': 1, 'sampleDuration': 30 };
assert.deepEqual(res, expected);
});
});
});

View File

@ -1,136 +0,0 @@
const assert = require('assert');
const ARN = require('../../../lib/models/ARN');
describe('ARN object model', () => {
describe('valid ARNs', () => {
[{ arn: 'arn:aws:iam::123456789012:role/backbeat',
service: 'iam',
accountId: '123456789012',
resource: 'role/backbeat',
isIAMAccount: false,
isIAMUser: false,
isIAMRole: true,
},
{ arn: 'arn:aws:iam::*:role/backbeat',
service: 'iam',
accountId: '*',
resource: 'role/backbeat',
isIAMAccount: false,
isIAMUser: false,
isIAMRole: true,
},
{ arn: 'arn:aws:iam:::role/backbeat',
service: 'iam',
accountId: null,
resource: 'role/backbeat',
isIAMAccount: false,
isIAMUser: false,
isIAMRole: false, // not a valid role without an account ID
},
{ arn: 'arn:aws:iam::123456789012:user/bart',
service: 'iam',
accountId: '123456789012',
resource: 'user/bart',
isIAMAccount: false,
isIAMUser: true,
isIAMRole: false,
},
{ arn: 'arn:aws:iam:::user/bart',
service: 'iam',
accountId: null,
resource: 'user/bart',
isIAMAccount: false,
isIAMUser: false, // not a valid user without an account ID
isIAMRole: false,
},
{ arn: 'arn:aws:iam::123456789012:root',
service: 'iam',
accountId: '123456789012',
resource: 'root',
isIAMAccount: true,
isIAMUser: false,
isIAMRole: false,
},
{ arn: 'arn:aws:iam:::root',
service: 'iam',
accountId: null,
resource: 'root',
isIAMAccount: false, // not a valid account without an account ID
isIAMUser: false,
isIAMRole: false,
},
{ arn: 'arn:aws:s3::123456789012:foo/bar/baz/qux',
service: 's3',
accountId: '123456789012',
resource: 'foo/bar/baz/qux',
isIAMAccount: false,
isIAMUser: false,
isIAMRole: false,
},
{ arn: 'arn:aws:s3::123456789012:foo:bar/baz/qux',
service: 's3',
accountId: '123456789012',
resource: 'foo:bar/baz/qux',
isIAMAccount: false,
isIAMUser: false,
isIAMRole: false,
},
{ arn: 'arn:aws:sts::123456789012:foobar',
service: 'sts',
accountId: '123456789012',
resource: 'foobar',
isIAMAccount: false,
isIAMUser: false,
isIAMRole: false,
},
{ arn: 'arn:aws:ring::123456789012:foobar',
service: 'ring',
accountId: '123456789012',
resource: 'foobar',
isIAMAccount: false,
isIAMUser: false,
isIAMRole: false,
},
{ arn: 'arn:scality:utapi::123456789012:foobar',
service: 'utapi',
accountId: '123456789012',
resource: 'foobar',
isIAMAccount: false,
isIAMUser: false,
isIAMRole: false,
},
{ arn: 'arn:scality:sso::123456789012:foobar',
service: 'sso',
accountId: '123456789012',
resource: 'foobar',
isIAMAccount: false,
isIAMUser: false,
isIAMRole: false,
},
].forEach(arnTest => it(`should accept ARN "${arnTest.arn}"`, () => {
const arnObj = ARN.createFromString(arnTest.arn);
assert(arnObj instanceof ARN);
assert.strictEqual(arnObj.getService(), arnTest.service);
assert.strictEqual(arnObj.getAccountId(), arnTest.accountId);
assert.strictEqual(arnObj.getResource(), arnTest.resource);
assert.strictEqual(arnObj.isIAMAccount(), arnTest.isIAMAccount);
assert.strictEqual(arnObj.isIAMUser(), arnTest.isIAMUser);
assert.strictEqual(arnObj.isIAMRole(), arnTest.isIAMRole);
assert.strictEqual(arnObj.toString(), arnTest.arn);
}));
});
describe('bad ARNs', () => {
['',
':',
'foo:',
'arn::iam::123456789012:role/backbeat',
'arn:aws:xxx::123456789012:role/backbeat',
'arn:aws:s3::123456789012345:role/backbeat',
'arn:aws:s3::12345678901b:role/backbeat',
].forEach(arn => it(`should fail with invalid ARN "${arn}"`, () => {
const res = ARN.createFromString(arn);
assert.notStrictEqual(res.error, undefined);
}));
});
});

View File

@ -1,6 +1,5 @@
const assert = require('assert'); const assert = require('assert');
const ObjectMD = require('../../../lib/models/ObjectMD'); const ObjectMD = require('../../../lib/models/ObjectMD');
const constants = require('../../../lib/constants');
describe('ObjectMD class setters/getters', () => { describe('ObjectMD class setters/getters', () => {
let md = null; let md = null;
@ -11,6 +10,7 @@ describe('ObjectMD class setters/getters', () => {
[ [
// In order: data property, value to set/get, default value // In order: data property, value to set/get, default value
['ModelVersion', null, 3],
['OwnerDisplayName', null, ''], ['OwnerDisplayName', null, ''],
['OwnerDisplayName', 'owner-display-name'], ['OwnerDisplayName', 'owner-display-name'],
['OwnerId', null, ''], ['OwnerId', null, ''],
@ -79,8 +79,6 @@ describe('ObjectMD class setters/getters', () => {
destination: '', destination: '',
storageClass: '', storageClass: '',
role: '', role: '',
storageType: '',
dataStoreVersionId: '',
}], }],
['ReplicationInfo', { ['ReplicationInfo', {
status: 'PENDING', status: 'PENDING',
@ -89,8 +87,6 @@ describe('ObjectMD class setters/getters', () => {
storageClass: 'STANDARD', storageClass: 'STANDARD',
role: 'arn:aws:iam::account-id:role/src-resource,' + role: 'arn:aws:iam::account-id:role/src-resource,' +
'arn:aws:iam::account-id:role/dest-resource', 'arn:aws:iam::account-id:role/dest-resource',
storageType: 'aws_s3',
dataStoreVersionId: 'QWY1QQwWn9xJcoz0EgJjJ_t8g4nMYsxo',
}], }],
['DataStoreName', null, ''], ['DataStoreName', null, ''],
].forEach(test => { ].forEach(test => {
@ -114,91 +110,3 @@ describe('ObjectMD class setters/getters', () => {
}); });
}); });
}); });
describe('ObjectMD import from stored blob', () => {
it('should export and import correctly the latest model version', () => {
const md = new ObjectMD();
const jsonMd = md.getSerialized();
const importedRes = ObjectMD.createFromBlob(jsonMd);
assert.ifError(importedRes.error);
const importedMd = importedRes.result;
assert.deepStrictEqual(md, importedMd);
});
it('should convert old location to new location', () => {
const md = new ObjectMD();
const value = md.getValue();
value['md-model-version'] = 1;
value.location = 'stringLocation';
const jsonMd = JSON.stringify(value);
const importedRes = ObjectMD.createFromBlob(jsonMd);
assert.strictEqual(importedRes.error, undefined);
const importedMd = importedRes.result;
const valueImported = importedMd.getValue();
assert.strictEqual(valueImported['md-model-version'],
constants.mdModelVersion);
assert.deepStrictEqual(valueImported.location,
[{ key: 'stringLocation' }]);
});
it('should keep null location as is', () => {
const md = new ObjectMD();
const value = md.getValue();
value.location = null;
const jsonMd = JSON.stringify(value);
const importedRes = ObjectMD.createFromBlob(jsonMd);
assert.strictEqual(importedRes.error, undefined);
const importedMd = importedRes.result;
const valueImported = importedMd.getValue();
assert.deepStrictEqual(valueImported.location, null);
importedMd.setLocation([]);
assert.deepStrictEqual(importedMd.getValue().location, null);
});
it('should add dataStoreName attribute if missing', () => {
const md = new ObjectMD();
const value = md.getValue();
value['md-model-version'] = 2;
delete value.dataStoreName;
const jsonMd = JSON.stringify(value);
const importedRes = ObjectMD.createFromBlob(jsonMd);
assert.strictEqual(importedRes.error, undefined);
const importedMd = importedRes.result;
const valueImported = importedMd.getValue();
assert.strictEqual(valueImported['md-model-version'],
constants.mdModelVersion);
assert.notStrictEqual(valueImported.dataStoreName, undefined);
});
it('should return undefined for dataStoreVersionId if no object location',
() => {
const md = new ObjectMD();
const value = md.getValue();
const jsonMd = JSON.stringify(value);
const importedRes = ObjectMD.createFromBlob(jsonMd);
assert.strictEqual(importedRes.error, undefined);
const importedMd = importedRes.result;
assert.strictEqual(importedMd.getDataStoreVersionId(), undefined);
});
it('should get dataStoreVersionId if saved in object location', () => {
const md = new ObjectMD();
const dummyLocation = {
dataStoreVersionId: 'data-store-version-id',
};
md.setLocation([dummyLocation]);
const value = md.getValue();
const jsonMd = JSON.stringify(value);
const importedRes = ObjectMD.createFromBlob(jsonMd);
assert.strictEqual(importedRes.error, undefined);
const importedMd = importedRes.result;
assert.strictEqual(importedMd.getDataStoreVersionId(),
dummyLocation.dataStoreVersionId);
});
it('should return an error if blob is malformed JSON', () => {
const importedRes = ObjectMD.createFromBlob('{BAD JSON}');
assert.notStrictEqual(importedRes.error, undefined);
assert.strictEqual(importedRes.result, undefined);
});
});

View File

@ -9,7 +9,7 @@ describe('round robin hosts', () => {
caption: 'with { host, port } objects in list', caption: 'with { host, port } objects in list',
hostsList: [{ host: '1.2.3.0', port: 1000 }, hostsList: [{ host: '1.2.3.0', port: 1000 },
{ host: '1.2.3.1', port: 1001 }, { host: '1.2.3.1', port: 1001 },
{ host: '1.2.3.2' }], { host: '1.2.3.2', port: 1002 }],
}, { }, {
caption: 'with "host:port" strings in list', caption: 'with "host:port" strings in list',
hostsList: ['1.2.3.0:1000', hostsList: ['1.2.3.0:1000',
@ -18,8 +18,7 @@ describe('round robin hosts', () => {
}].forEach(testCase => describe(testCase.caption, () => { }].forEach(testCase => describe(testCase.caption, () => {
beforeEach(() => { beforeEach(() => {
roundRobin = new RoundRobin(testCase.hostsList, roundRobin = new RoundRobin(testCase.hostsList,
{ stickyCount: 10, { stickyCount: 10 });
defaultPort: 1002 });
}); });
it('should pick all hosts in turn', () => { it('should pick all hosts in turn', () => {
@ -32,7 +31,8 @@ describe('round robin hosts', () => {
// expect 3 loops of 10 times each of the 3 hosts // expect 3 loops of 10 times each of the 3 hosts
for (let i = 0; i < 90; ++i) { for (let i = 0; i < 90; ++i) {
const hostItem = roundRobin.pickHost(); const hostItem = roundRobin.pickHost();
hostsPickCount[hostItem.host] += 1; hostsPickCount[hostItem.host] =
hostsPickCount[hostItem.host] + 1;
} }
assert.strictEqual(hostsPickCount['1.2.3.0'], 30); assert.strictEqual(hostsPickCount['1.2.3.0'], 30);
assert.strictEqual(hostsPickCount['1.2.3.1'], 30); assert.strictEqual(hostsPickCount['1.2.3.1'], 30);
@ -51,7 +51,8 @@ describe('round robin hosts', () => {
const curHost = roundRobin.getCurrentHost(); const curHost = roundRobin.getCurrentHost();
for (let i = 0; i < 10; ++i) { for (let i = 0; i < 10; ++i) {
const hostItem = roundRobin.pickHost(); const hostItem = roundRobin.pickHost();
hostsPickCount[hostItem.host] += 1; hostsPickCount[hostItem.host] =
hostsPickCount[hostItem.host] + 1;
} }
assert.strictEqual(hostsPickCount[curHost.host], 10); assert.strictEqual(hostsPickCount[curHost.host], 10);
}); });
@ -66,7 +67,8 @@ describe('round robin hosts', () => {
// expect each host to be picked up 3 times // expect each host to be picked up 3 times
for (let i = 0; i < 9; ++i) { for (let i = 0; i < 9; ++i) {
const hostItem = roundRobin.pickNextHost(); const hostItem = roundRobin.pickNextHost();
hostsPickCount[hostItem.host] += 1; hostsPickCount[hostItem.host] =
hostsPickCount[hostItem.host] + 1;
} }
assert.strictEqual(hostsPickCount['1.2.3.0'], 3); assert.strictEqual(hostsPickCount['1.2.3.0'], 3);
assert.strictEqual(hostsPickCount['1.2.3.1'], 3); assert.strictEqual(hostsPickCount['1.2.3.1'], 3);
@ -99,18 +101,5 @@ describe('round robin hosts', () => {
// eslint-disable-next-line no-new // eslint-disable-next-line no-new
new RoundRobin(['zenko.io', 'zenka.ia']); new RoundRobin(['zenko.io', 'zenka.ia']);
}); });
it('should have set default port if not in bootstrap list', () => {
// the current host should be picked 10 times in a row
const portMap = {
'1.2.3.0': 1000,
'1.2.3.1': 1001,
'1.2.3.2': 1002,
};
for (let i = 0; i < 100; ++i) {
const hostItem = roundRobin.pickHost();
assert.strictEqual(hostItem.port, portMap[hostItem.host]);
}
});
})); }));
}); });

View File

@ -175,7 +175,7 @@ describe('REST interface for blob data storage', () => {
const value = resp.read(); const value = resp.read();
assert.strictEqual( assert.strictEqual(
value.toString(), value.toString(),
contents.slice(...sliceArgs)); contents.slice.apply(contents, sliceArgs));
checkContentRange(resp, contentRange[0], checkContentRange(resp, contentRange[0],
contentRange[1]); contentRange[1]);
done(); done();

View File

@ -16,21 +16,21 @@ const levelNet = require('../../../../lib/network/rpc/level-net');
// simply forward the API calls to the db as-is // simply forward the API calls to the db as-is
// Async RPC handlers used by the level-net tests: each handler simply
// forwards its trailing arguments (key, value, options, callback, ...)
// to the per-connection sub-level DB untouched.
// NOTE(review): the diff view fused the old `.apply(env.subDb, args)`
// form and the new spread form onto the same lines; this is the clean
// spread-form reconstruction, which is call-for-call equivalent.
const dbAsyncAPI = {
    put: (env, ...args) => {
        env.subDb.put(...args);
    },
    del: (env, ...args) => {
        env.subDb.del(...args);
    },
    get: (env, ...args) => {
        env.subDb.get(...args);
    },
    batch: (env, ...args) => {
        env.subDb.batch(...args);
    },
};
// Sync RPC handlers: createReadStream must hand its stream back
// synchronously, so forward the argument list directly.
// FIX: the newer column of the diff read `createReadStream(args)`,
// which passes the whole rest-args ARRAY as a single (options)
// argument; the old `.apply(env.subDb, args)` form spread it. Spread
// here to preserve the original apply() semantics.
const dbSyncAPI = {
    createReadStream:
        (env, ...args) => env.subDb.createReadStream(...args),
};
describe('level-net - LevelDB over network', () => { describe('level-net - LevelDB over network', () => {

View File

@ -1,647 +0,0 @@
const assert = require('assert');
const Principal = require('../../../lib/policyEvaluator/principal');
const RequestContext = require('../../../lib/policyEvaluator/RequestContext');
// Canonical account ids and principal ARNs shared by every test below.
const defaultAccountId = '123456789012';
const anotherAccountId = '098765432112';

// Tiny builders for the two ARN partitions these fixtures need.
const iamArn = (accountId, resource) =>
    `arn:aws:iam::${accountId}:${resource}`;
const stsArn = (accountId, resource) =>
    `arn:aws:sts::${accountId}:${resource}`;

const defaultAccountArn = iamArn(defaultAccountId, 'root');
const defaultUserArn = iamArn(defaultAccountId, 'user/test');
const defaultRole = iamArn(defaultAccountId, 'role/role1');
const defaultAssumedRole =
    stsArn(defaultAccountId, 'assumed-role/role1/session');
const defaultSamlProvider =
    iamArn(defaultAccountId, 'saml-provider/provider1');
const defaultFederatedUser =
    stsArn(defaultAccountId, 'federated-user/foo');
const anotherAccountArn = iamArn(anotherAccountId, 'root');
const anotherUserArn = iamArn(anotherAccountId, 'user/test');

// Principal values matching the default account, by id or by ARN.
const defaultValids = {
    AWS: [
        defaultAccountId,
        defaultAccountArn,
    ],
};

// Common evaluator params: a logger whose methods are all no-ops.
const noop = () => {};
const defaultParams = {
    log: {
        trace: noop,
        debug: noop,
        info: noop,
    },
};
// Unit tests for the policy Principal evaluator (mocha).
//
// Three table-driven suites:
//   1. _evaluatePrincipalField(): evaluate one statement's Principal /
//      NotPrincipal entry against the requester's valid principal
//      values -> 'Allow' | 'Deny' | 'Neutral'.
//   2. _evaluatePrincipal(): fold all statements of a trust policy
//      together (an explicit Deny beats an Allow; no match -> Deny).
//   3. evaluatePrincipal(): end-to-end evaluation from a
//      RequestContext, covering same-account users, assumed roles,
//      services, federated users, cross-account access and the
//      sts:ExternalId condition.
describe('Principal evaluator', () => {
    [
        {
            name: 'anonymous as Principal (effect Allow) -> grant access',
            statement: {
                Principal: '*',
                Effect: 'Allow',
            },
            valids: defaultValids,
            result: 'Allow',
        },
        {
            name: 'anonymous as Principal (effect Deny) -> deny access',
            statement: {
                Principal: '*',
                Effect: 'Deny',
            },
            valids: defaultValids,
            result: 'Deny',
        },
        {
            name: 'account (arn) in Principal (effect Allow) -> grant access',
            statement: {
                Principal: {
                    AWS: defaultAccountArn,
                },
                Effect: 'Allow',
            },
            valids: defaultValids,
            result: 'Allow',
        },
        {
            name: 'account (arn) in Principal (effect Deny) -> deny access',
            statement: {
                Principal: {
                    AWS: [defaultAccountArn],
                },
                Effect: 'Deny',
            },
            valids: defaultValids,
            result: 'Deny',
        },
        {
            name: 'account (id) in Principal (effect Allow) -> grant access',
            statement: {
                Principal: {
                    AWS: defaultAccountId,
                },
                Effect: 'Allow',
            },
            valids: defaultValids,
            result: 'Allow',
        },
        {
            name: 'account (id) as Principal (effect Deny) -> deny access',
            statement: {
                Principal: {
                    AWS: defaultAccountId,
                },
                Effect: 'Deny',
            },
            valids: defaultValids,
            result: 'Deny',
        },
        {
            name: 'account not in Principal (effect Allow) -> neutral',
            statement: {
                Principal: {
                    AWS: [anotherAccountId],
                },
                Effect: 'Allow',
            },
            valids: defaultValids,
            result: 'Neutral',
        },
        {
            name: 'account not in Principal (effect Deny) -> neutral',
            statement: {
                Principal: {
                    AWS: [anotherAccountId],
                },
                Effect: 'Deny',
            },
            valids: defaultValids,
            result: 'Neutral',
        },
        {
            name:
                'multiple account as Principal (effect Allow) -> grant access',
            statement: {
                Principal: {
                    AWS: [anotherAccountId, defaultAccountId],
                },
                Effect: 'Allow',
            },
            valids: defaultValids,
            result: 'Allow',
        },
        {
            name: 'anonymous as NotPrincipal (effect Allow) -> neutral',
            statement: {
                NotPrincipal: '*',
                Effect: 'Allow',
            },
            valids: defaultValids,
            result: 'Neutral',
        },
        {
            name: 'anonymous as NotPrincipal (effect Deny) -> neutral',
            statement: {
                NotPrincipal: '*',
                // FIX: was 'Allow', which made this case an exact
                // duplicate of the previous one; the name tests the
                // Deny effect (a matching NotPrincipal is Neutral for
                // either effect, so the expected result is unchanged).
                Effect: 'Deny',
            },
            valids: defaultValids,
            result: 'Neutral',
        },
        {
            name: 'account (arn) in NotPrincipal (effect Allow) -> neutral',
            statement: {
                NotPrincipal: {
                    AWS: defaultAccountArn,
                },
                Effect: 'Allow',
            },
            valids: defaultValids,
            result: 'Neutral',
        },
        {
            name: 'account (arn) in NotPrincipal (effect Deny) -> neutral',
            statement: {
                NotPrincipal: {
                    AWS: [anotherAccountArn, defaultAccountArn],
                },
                // FIX: was 'Allow'; the name says this case covers the
                // Deny effect (expected result stays Neutral).
                Effect: 'Deny',
            },
            valids: defaultValids,
            result: 'Neutral',
        },
        {
            name: 'account (arn) not in NotPrincipal (effect Allow) -> ' +
                'grant access',
            statement: {
                NotPrincipal: {
                    AWS: anotherAccountArn,
                },
                Effect: 'Allow',
            },
            valids: defaultValids,
            result: 'Allow',
        },
        {
            name: 'account (arn) not in NotPrincipal (effect Deny) -> ' +
                'deny access',
            statement: {
                NotPrincipal: {
                    AWS: anotherAccountArn,
                },
                Effect: 'Deny',
            },
            valids: defaultValids,
            result: 'Deny',
        },
        {
            name: 'Other entities than AWS in principal (effect Allow) -> ' +
                'neutral',
            statement: {
                Principal: {
                    Service: 'backbeat',
                },
                Effect: 'Allow',
            },
            valids: defaultValids,
            result: 'Neutral',
        },
        {
            name: 'Other entities than AWS in principal (effect Deny) -> ' +
                'neutral',
            statement: {
                Principal: {
                    Service: 'backbeat',
                },
                Effect: 'Deny',
            },
            valids: defaultValids,
            result: 'Neutral',
        },
        {
            name: 'Service in Principal (effect Allow) -> grant access',
            statement: {
                Principal: {
                    Service: 'backbeat',
                },
                Effect: 'Allow',
            },
            valids: {
                Service: 'backbeat',
            },
            result: 'Allow',
        },
        {
            name: 'User as principal (effect Allow) -> grant access',
            statement: {
                Principal: {
                    AWS: `arn:aws:iam::${defaultAccountId}:user/test`,
                },
                Effect: 'Allow',
            },
            valids: {
                AWS: `arn:aws:iam::${defaultAccountId}:user/test`,
            },
            result: 'Allow',
        },
        {
            name: 'User not in Principal (effect Allow) -> neutral',
            statement: {
                Principal: {
                    AWS: `arn:aws:iam::${defaultAccountId}:user/test`,
                },
                Effect: 'Allow',
            },
            valids: {
                AWS: `arn:aws:iam::${defaultAccountId}:user/another/testUser`,
            },
            result: 'Neutral',
        },
        {
            name: 'Role in Principal (effect Allow) -> grant access',
            statement: {
                Principal: {
                    AWS: `arn:aws:iam::${defaultAccountId}:role/role1`,
                },
                Effect: 'Allow',
            },
            valids: {
                AWS: [
                    `arn:aws:iam::${defaultAccountId}:role/role1`,
                    `arn:aws:iam::${defaultAccountId}:assumed-role` +
                        '/role1/session',
                ],
            },
            result: 'Allow',
        },
        {
            name: 'Role in Principal (effect Deny) -> deny access',
            statement: {
                Principal: {
                    AWS: `arn:aws:iam::${defaultAccountId}:role/role1`,
                },
                Effect: 'Deny',
            },
            valids: {
                AWS: [
                    `arn:aws:iam::${defaultAccountId}:role/role1`,
                    `arn:aws:iam::${defaultAccountId}:assumed-role` +
                        '/role1/session',
                ],
            },
            result: 'Deny',
        },
    ].forEach(test => {
        it(`_evaluatePrincipalField(): ${test.name}`, () => {
            assert.strictEqual(Principal._evaluatePrincipalField(defaultParams,
                test.statement, test.valids), test.result);
        });
    });

    // _evaluatePrincipal(): multi-statement combination rules.
    [
        {
            name: 'should allow with a neutral',
            statement: [
                {
                    Principal: {
                        AWS: anotherAccountArn,
                    },
                    Effect: 'Deny',
                },
                {
                    Principal: {
                        AWS: defaultAccountArn,
                    },
                    Effect: 'Allow',
                },
            ],
            valids: defaultValids,
            result: 'Allow',
        },
        {
            name: 'should deny even with an allow',
            statement: [
                {
                    Principal: {
                        AWS: defaultAccountArn,
                    },
                    Effect: 'Allow',
                },
                {
                    Principal: {
                        AWS: defaultAccountArn,
                    },
                    Effect: 'Deny',
                },
            ],
            valids: defaultValids,
            result: 'Deny',
        },
        {
            name: 'should deny if no matches',
            statement: [
                {
                    Principal: {
                        AWS: anotherAccountArn,
                    },
                    Effect: 'Allow',
                },
            ],
            valids: defaultValids,
            result: 'Deny',
        },
    ].forEach(test => {
        it(`_evaluatePrincipal(): ${test.name}`, () => {
            const params = {
                log: defaultParams.log,
                trustedPolicy: {
                    Statement: test.statement,
                },
            };
            const valids = test.valids;
            assert.strictEqual(Principal._evaluatePrincipal(params, valids),
                test.result);
        });
    });

    // evaluatePrincipal(): full evaluation from a RequestContext; the
    // expected value also carries checkAction, which tells the caller
    // whether a subsequent action check is still required.
    [
        {
            name: 'should check user inside the same account',
            statement: [
                {
                    Principal: {
                        AWS: defaultUserArn,
                    },
                    Effect: 'Allow',
                },
            ],
            requester: {
                accountId: defaultAccountId,
                arn: defaultUserArn,
                parentArn: null,
                userType: 'User',
            },
            target: {
                accountId: defaultAccountId,
            },
            result: {
                result: 'Allow',
                checkAction: false,
            },
        },
        {
            name: 'should deny user inside the same account',
            statement: [
                {
                    Principal: {
                        AWS: defaultUserArn,
                    },
                    Effect: 'Allow',
                },
            ],
            requester: {
                accountId: defaultAccountId,
                arn: `arn:aws:iam::${defaultAccountId}:user/anotherUser`,
                parentArn: null,
                userType: 'User',
            },
            target: {
                accountId: defaultAccountId,
            },
            result: {
                result: 'Deny',
                checkAction: false,
            },
        },
        {
            name: 'should deny principal if account is deny',
            statement: [
                {
                    Principal: {
                        AWS: defaultAccountId,
                    },
                    Effect: 'Deny',
                },
                {
                    Principal: {
                        AWS: defaultUserArn,
                    },
                    Effect: 'Allow',
                },
            ],
            requester: {
                accountId: defaultAccountId,
                arn: defaultUserArn,
                parentArn: null,
                userType: 'User',
            },
            target: {
                accountId: defaultAccountId,
            },
            result: {
                result: 'Deny',
                checkAction: false,
            },
        },
        {
            name: 'should deny assumed role if role is deny',
            statement: [
                {
                    Principal: {
                        AWS: defaultRole,
                    },
                    Effect: 'Deny',
                },
                {
                    Principal: {
                        AWS: defaultAssumedRole,
                    },
                    Effect: 'Allow',
                },
            ],
            requester: {
                accountId: defaultAccountId,
                arn: defaultAssumedRole,
                parentArn: defaultRole,
                userType: 'AssumedRole',
            },
            target: {
                accountId: defaultAccountId,
            },
            result: {
                result: 'Deny',
                checkAction: false,
            },
        },
        {
            name: 'should deny user as principal if account is different',
            statement: [
                {
                    Principal: {
                        AWS: anotherUserArn,
                    },
                    Effect: 'Allow',
                },
            ],
            requester: {
                accountId: anotherAccountId,
                arn: anotherUserArn,
                parentArn: null,
                userType: 'User',
            },
            target: {
                accountId: defaultAccountId,
            },
            result: {
                result: 'Deny',
                checkAction: true,
            },
        },
        {
            name: 'should allow user if account is in principal',
            statement: [
                {
                    Principal: {
                        AWS: anotherAccountArn,
                    },
                    Effect: 'Allow',
                },
            ],
            requester: {
                accountId: anotherAccountId,
                arn: anotherUserArn,
                parentArn: null,
                userType: 'User',
            },
            target: {
                accountId: defaultAccountId,
            },
            result: {
                result: 'Allow',
                checkAction: true,
            },
        },
        {
            name: 'should allow service as principal',
            statement: [
                {
                    Principal: {
                        Service: 'backbeat',
                    },
                    Effect: 'Allow',
                },
            ],
            requester: {
                accountId: defaultAccountId,
                arn: 'backbeat',
                parentArn: null,
                userType: 'Service',
            },
            target: {
                accountId: defaultAccountId,
            },
            result: {
                result: 'Allow',
                checkAction: false,
            },
        },
        {
            name: 'should allow federated provider',
            statement: [
                {
                    Principal: {
                        Federated: defaultSamlProvider,
                    },
                    Effect: 'Allow',
                },
            ],
            requester: {
                accountId: defaultAccountId,
                arn: defaultFederatedUser,
                parentArn: defaultSamlProvider,
                userType: 'Federated',
            },
            target: {
                accountId: defaultAccountId,
            },
            result: {
                result: 'Allow',
                checkAction: false,
            },
        },
        {
            name: 'should not allow when external id not matching',
            statement: [
                {
                    Principal: {
                        AWS: anotherAccountId,
                    },
                    Effect: 'Allow',
                    Condition: {
                        StringEquals: { 'sts:ExternalId': '12345' },
                    },
                },
            ],
            requester: {
                accountId: anotherAccountId,
                arn: anotherUserArn,
                parentArn: null,
                userType: 'User',
            },
            target: {
                accountId: defaultAccountId,
            },
            result: {
                result: 'Deny',
                checkAction: true,
            },
        },
        {
            name: 'should allow when external id matching',
            statement: [
                {
                    Principal: {
                        AWS: anotherAccountId,
                    },
                    Effect: 'Allow',
                    Condition: {
                        StringEquals: { 'sts:ExternalId': '4321' },
                    },
                },
            ],
            requester: {
                accountId: anotherAccountId,
                arn: anotherUserArn,
                parentArn: null,
                userType: 'User',
            },
            target: {
                accountId: defaultAccountId,
            },
            result: {
                result: 'Allow',
                checkAction: true,
            },
        },
    ].forEach(test => {
        it(`evaluatePrincipal(): ${test.name}`, () => {
            // The request context always carries externalId '4321';
            // individual cases decide whether their Condition matches.
            const rc = new RequestContext({}, {}, '', '', '127.0.0.1',
                false, 'assumeRole', 'sts', null, {
                    accountid: test.requester.accountId,
                    arn: test.requester.arn,
                    parentArn: test.requester.parentArn,
                    principalType: test.requester.userType,
                    externalId: '4321',
                }, 'v4', 'V4');
            const params = {
                log: defaultParams.log,
                trustedPolicy: {
                    Statement: test.statement,
                },
                rc,
                targetAccountId: test.target.accountId,
            };
            const result = Principal.evaluatePrincipal(params);
            assert.deepStrictEqual(result, test.result);
        });
    });
});

View File

@ -1078,36 +1078,6 @@ describe('policyEvaluator', () => {
check(requestContext, {}, policy, 'Allow'); check(requestContext, {}, policy, 'Allow');
}); });
it('should allow policy arn if meet condition',
() => {
policy.Statement.Condition = {
ArnLike: { 'iam:PolicyArn':
['arn:aws:iam::012345678901:policy/dev/*'] },
};
requestContext.setRequesterInfo(
{ accountid: '012345678901' });
const rcModifiers = {
_policyArn:
'arn:aws:iam::012345678901:policy/dev/devMachine1',
};
check(requestContext, rcModifiers, policy, 'Allow');
});
it('should not allow policy arn if do not meet condition',
() => {
policy.Statement.Condition = {
ArnLike: { 'iam:PolicyArn':
['arn:aws:iam::012345678901:policy/dev/*'] },
};
requestContext.setRequesterInfo(
{ accountid: '012345678901' });
const rcModifiers = {
_policyArn:
'arn:aws:iam::012345678901:policy/admin/deleteUser',
};
check(requestContext, rcModifiers, policy, 'Neutral');
});
it('should allow access with multiple operator conditions ' + it('should allow access with multiple operator conditions ' +
'and multiple conditions under an operator', 'and multiple conditions under an operator',
() => { () => {
@ -1262,13 +1232,11 @@ describe('handleWildcards', () => {
assert.deepStrictEqual(result, '^abc\\*abc\\?abc\\$$'); assert.deepStrictEqual(result, '^abc\\*abc\\?abc\\$$');
}); });
/* eslint-disable no-useless-escape */
it('should escape other regular expression special characters', () => { it('should escape other regular expression special characters', () => {
const result = handleWildcards('*^.+?()|[\]{}'); const result = handleWildcards('*^.+?()|[\]{}');
assert.deepStrictEqual(result, assert.deepStrictEqual(result,
'^.*?\\^\\.\\+.{1}\\(\\)\\|\\[\\\]\\{\\}$'); '^.*?\\^\\.\\+.{1}\\(\\)\\|\\[\\\]\\{\\}$');
}); });
/* eslint-enable */
}); });
describe('substituteVariables', () => { describe('substituteVariables', () => {

View File

@ -1,73 +0,0 @@
const assert = require('assert');
const azureMpuUtils =
require('../../../../lib/s3middleware/azureHelpers/mpuUtils');
const padString = azureMpuUtils.padString;
const getSubPartInfo = azureMpuUtils.getSubPartInfo;
// Fixtures for padString(): the inputs per category and the padded
// strings expected back (part keys are right-padded with '%').
const padStringTests = [
    {
        category: 'partNumber',
        strings: [1, 10, 100, 10000],
        expectedResults: ['00001', '00010', '00100', '10000'],
    },
    {
        category: 'subPart',
        strings: [1, 50],
        expectedResults: ['01', '50'],
    },
    {
        category: 'part',
        strings: ['test|'],
        expectedResults:
        ['test|%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'],
    },
];

// Size helpers for the sub-part fixtures below.
const oneMb = 1024 * 1024;
const oneHundredMb = 100 * oneMb;

// Fixtures for getSubPartInfo(): content length in, expected sub-part
// count and size of the final (possibly short) sub-part out.
const subPartInfoTests = [
    {
        desc: '100 mb',
        size: oneHundredMb,
        expectedNumberSubParts: 1,
        expectedLastPartSize: oneHundredMb,
    },
    {
        desc: '101 mb',
        size: oneHundredMb + oneMb,
        expectedNumberSubParts: 2,
        expectedLastPartSize: oneMb,
    },
    {
        desc: '599 mb',
        size: 6 * oneHundredMb - oneMb,
        expectedNumberSubParts: 6,
        expectedLastPartSize: oneHundredMb - oneMb,
    },
    {
        desc: '600 mb',
        size: 6 * oneHundredMb,
        expectedNumberSubParts: 6,
        expectedLastPartSize: oneHundredMb,
    },
];
// Table-driven checks of the Azure MPU helpers padString() and
// getSubPartInfo(), using the fixture tables defined above.
describe('s3middleware Azure MPU helper utility function', () => {
    // padString(): every category must yield its expected padded forms.
    padStringTests.forEach(testCase => {
        const { category, strings, expectedResults } = testCase;
        it(`padString should pad a ${category}`, done => {
            const padded = strings.map(str => padString(str, category));
            assert.deepStrictEqual(padded, expectedResults);
            done();
        });
    });
    // getSubPartInfo(): verify the index and size of the last sub-part
    // computed for each data content length.
    subPartInfoTests.forEach(testCase => {
        it('getSubPartInfo should return correct result for ' +
        `dataContentLength of ${testCase.desc}`, done => {
            const info = getSubPartInfo(testCase.size);
            assert.strictEqual(info.lastPartIndex,
                testCase.expectedNumberSubParts - 1);
            assert.strictEqual(info.lastPartSize,
                testCase.expectedLastPartSize);
            done();
        });
    });
});

View File

@ -1,24 +0,0 @@
const assert = require('assert');
const crypto = require('crypto');
const objectUtils =
require('../../../lib/s3middleware/objectUtils');
// MD5 of the empty input in both encodings; the two tests below check
// that objectUtils converts each representation into the other.
const hexHash = 'd41d8cd98f00b204e9800998ecf8427e';
const base64Hash = '1B2M2Y8AsgTpgAmY7PhCfg==';

// FIX: corrected typos in the test descriptions
// ('utilites' -> 'utilities', 'hexademal' -> 'hexadecimal').
describe('s3middleware object utilities', () => {
    it('should convert hexadecimal MD5 to base 64', done => {
        // md5 of nothing, hex-encoded == hexHash
        const hash = crypto.createHash('md5').digest('hex');
        const convertedHash = objectUtils.getBase64MD5(hash);
        assert.strictEqual(convertedHash, base64Hash);
        done();
    });
    it('should convert base 64 MD5 to hexadecimal', done => {
        // md5 of nothing, base64-encoded == base64Hash
        const hash = crypto.createHash('md5').digest('base64');
        const convertedHash = objectUtils.getHexMD5(hash);
        assert.strictEqual(convertedHash, hexHash);
        done();
    });
});