Compare commits


No commits in common. "bcabdeeadb3a1070691ab856aa2d4e96c710d895" and "3c54bd740fcc179d1ea500fbc230cc759e80cde9" have entirely different histories.

77 changed files with 915 additions and 3294 deletions

.gitignore vendored
View File

@@ -1,5 +1 @@
# Logs
*.log
# Dependency directory
node_modules/

View File

@@ -7,8 +7,6 @@ general:
machine:
node:
version: 6.9.5
services:
- redis
environment:
CXX: g++-4.9

View File

@@ -29,7 +29,6 @@ module.exports = {
evaluators: require('./lib/policyEvaluator/evaluator.js'),
validateUserPolicy: require('./lib/policy/policyValidator')
.validateUserPolicy,
evaluatePrincipal: require('./lib/policyEvaluator/principal'),
RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
},
Clustering: require('./lib/Clustering'),
@@ -60,22 +59,12 @@ module.exports = {
},
s3middleware: {
userMetadata: require('./lib/s3middleware/userMetadata'),
convertToXml: require('./lib/s3middleware/convertToXml'),
escapeForXml: require('./lib/s3middleware/escapeForXml'),
tagging: require('./lib/s3middleware/tagging'),
validateConditionalHeaders:
require('./lib/s3middleware/validateConditionalHeaders')
.validateConditionalHeaders,
MD5Sum: require('./lib/s3middleware/MD5Sum'),
objectUtils: require('./lib/s3middleware/objectUtils'),
azureHelper: {
mpuUtils:
require('./lib/s3middleware/azureHelpers/mpuUtils'),
ResultsCollector:
require('./lib/s3middleware/azureHelpers/ResultsCollector'),
SubStreamInterface:
require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
},
},
storage: {
metadata: {
@@ -94,17 +83,12 @@ module.exports = {
},
utils: require('./lib/storage/utils'),
},
models: {
BucketInfo: require('./lib/models/BucketInfo'),
ObjectMD: require('./lib/models/ObjectMD'),
ObjectMDLocation: require('./lib/models/ObjectMDLocation'),
ARN: require('./lib/models/ARN'),
WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
ReplicationConfiguration:
require('./lib/models/ReplicationConfiguration'),
},
metrics: {
StatsClient: require('./lib/metrics/StatsClient'),
RedisClient: require('./lib/metrics/RedisClient'),
},
};

View File

@@ -49,14 +49,6 @@ class AuthInfo {
isRequesterPublicUser() {
return this.canonicalID === constants.publicId;
}
isRequesterAServiceAccount() {
return this.canonicalID.startsWith(
`${constants.zenkoServiceAccount}/`);
}
isRequesterThisServiceAccount(serviceName) {
return this.canonicalID ===
`${constants.zenkoServiceAccount}/${serviceName}`;
}
}
module.exports = AuthInfo;
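
A quick sketch of the removed service-account helpers, assuming AuthInfo is constructed from an object carrying a canonicalID (as in upstream Arsenal) and using the zenkoServiceAccount constant that appears later in this diff; the 'clueso' service name is hypothetical:

const AuthInfo = require('./AuthInfo');

// hypothetical requester whose canonical ID marks it as a service account
const authInfo = new AuthInfo({
    canonicalID: 'http://acs.zenko.io/accounts/service/clueso',
});
authInfo.isRequesterAServiceAccount();            // true: prefix match
authInfo.isRequesterThisServiceAccount('clueso'); // true: exact service match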

View File

@@ -12,7 +12,6 @@ const convertUTCtoISO8601 = require('./v4/timeUtils').convertUTCtoISO8601;
const vaultUtilities = require('./in_memory/vaultUtilities');
const backend = require('./in_memory/Backend');
const validateAuthConfig = require('./in_memory/validateAuthConfig');
const AuthLoader = require('./in_memory/AuthLoader');
const Vault = require('./Vault');
let vault = null;
@@ -153,11 +152,10 @@ function doAuth(request, log, cb, awsService, requestContexts) {
* @param {string} accessKey - the accessKey
* @param {string} secretKeyValue - the secretKey
* @param {string} awsService - Aws service related
* @param {string} [proxyPath] - path that gets proxied by reverse proxy
* @return {undefined}
*/
function generateV4Headers(request, data, accessKey, secretKeyValue,
awsService, proxyPath) {
awsService) {
Object.assign(request, { headers: {} });
const amzDate = convertUTCtoISO8601(Date.now());
// get date without time
@@ -188,8 +186,8 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
|| headerName === 'host'
).sort().join(';');
const params = { request, signedHeaders, payloadChecksum,
credentialScope, timestamp, query: data,
awsService: service, proxyPath };
credentialScope, timestamp, query: data,
awsService: service };
const stringToSign = constructStringToSignV4(params);
const signingKey = vaultUtilities.calculateSigningKey(secretKeyValue,
region,
@@ -216,7 +214,6 @@ module.exports = {
inMemory: {
backend,
validateAuthConfig,
AuthLoader,
},
AuthInfo,
Vault,

View File

@@ -1,223 +0,0 @@
const fs = require('fs');
const glob = require('simple-glob');
const joi = require('joi');
const werelogs = require('werelogs');
const ARN = require('../../models/ARN');
/**
* Load authentication information from files or pre-loaded account
* objects
*
* @class AuthLoader
*/
class AuthLoader {
constructor(logApi) {
this._log = new (logApi || werelogs).Logger('S3');
this._authData = { accounts: [] };
// null: unknown validity, true/false: valid or invalid
this._isValid = null;
this._joiKeysValidator = joi.array()
.items({
access: joi.string().required(),
secret: joi.string().required(),
})
.required();
const accountsJoi = joi.array()
.items({
name: joi.string().required(),
email: joi.string().email().required(),
arn: joi.string().required(),
canonicalID: joi.string().required(),
shortid: joi.string().regex(/^[0-9]{12}$/).required(),
keys: this._joiKeysValidator,
// backward-compat
users: joi.array(),
})
.required()
.unique('arn')
.unique('email')
.unique('canonicalID');
this._joiValidator = joi.object({ accounts: accountsJoi });
}
/**
* add one or more accounts to the authentication info
*
* @param {object} authData - authentication data
* @param {object[]} authData.accounts - array of account data
* @param {string} authData.accounts[].name - account name
* @param {string} authData.accounts[].email - email address
* @param {string} authData.accounts[].arn - account ARN,
* e.g. 'arn:aws:iam::123456789012:root'
* @param {string} authData.accounts[].canonicalID - account
* canonical ID
* @param {string} authData.accounts[].shortid - account ID number,
* e.g. '123456789012'
* @param {object[]} authData.accounts[].keys - array of
* access/secret keys
* @param {string} authData.accounts[].keys[].access - access key
* @param {string} authData.accounts[].keys[].secret - secret key
* @param {string} [filePath] - optional file path info for
* logging purpose
* @return {undefined}
*/
addAccounts(authData, filePath) {
const isValid = this._validateData(authData, filePath);
if (isValid) {
this._authData.accounts =
this._authData.accounts.concat(authData.accounts);
// defer validity checking when getting data to avoid
// logging multiple times the errors (we need to validate
// all accounts at once to detect duplicate values)
if (this._isValid) {
this._isValid = null;
}
} else {
this._isValid = false;
}
}
/**
* add account information from a file
*
* @param {string} filePath - file path containing JSON
* authentication info (see {@link addAccounts()} for format)
* @return {undefined}
*/
addFile(filePath) {
const authData = JSON.parse(fs.readFileSync(filePath));
this.addAccounts(authData, filePath);
}
/**
* add account information from a filesystem path
*
* @param {string|string[]} globPattern - filesystem glob pattern,
* can be a single string or an array of glob patterns. Globs
* can be simple file paths or can contain glob matching
* characters, like '/a/b/*.json'. The matching files are
* individually loaded as JSON and accounts are added. See
* {@link addAccounts()} for JSON format.
* @return {undefined}
*/
addFilesByGlob(globPattern) {
const files = glob(globPattern);
files.forEach(filePath => this.addFile(filePath));
}
/**
* perform validation on authentication info previously
* loaded. Note that it has to be done on the entire set after an
* update to catch duplicate account IDs or access keys.
*
* @return {boolean} true if authentication info is valid
* false otherwise
*/
validate() {
if (this._isValid === null) {
this._isValid = this._validateData(this._authData);
}
return this._isValid;
}
/**
* get authentication info as a plain JS object containing all accounts
* under the "accounts" attribute, with validation.
*
* @return {object|null} the validated authentication data
* null if invalid
*/
getData() {
return this.validate() ? this._authData : null;
}
_validateData(authData, filePath) {
const res = joi.validate(authData, this._joiValidator,
{ abortEarly: false });
if (res.error) {
this._dumpJoiErrors(res.error.details, filePath);
return false;
}
let allKeys = [];
let arnError = false;
const validatedAuth = res.value;
validatedAuth.accounts.forEach(account => {
// backward-compat: ignore arn if starts with 'aws:' and log a
// warning
if (account.arn.startsWith('aws:')) {
this._log.error(
'account must have a valid AWS ARN, legacy examples ' +
'starting with \'aws:\' are not supported anymore. ' +
'Please convert to a proper account entry (see ' +
'examples at https://github.com/scality/S3/blob/' +
'master/conf/authdata.json). Also note that support ' +
'for account users has been dropped.',
{ accountName: account.name, accountArn: account.arn,
filePath });
arnError = true;
return;
}
if (account.users) {
this._log.error(
'support for account users has been dropped, consider ' +
'turning users into account entries (see examples at ' +
'https://github.com/scality/S3/blob/master/conf/' +
'authdata.json)',
{ accountName: account.name, accountArn: account.arn,
filePath });
arnError = true;
return;
}
const arnObj = ARN.createFromString(account.arn);
if (arnObj.error) {
this._log.error(
'authentication config validation error',
{ reason: arnObj.error.description,
accountName: account.name, accountArn: account.arn,
filePath });
arnError = true;
return;
}
if (!arnObj.isIAMAccount()) {
this._log.error(
'authentication config validation error',
{ reason: 'not an IAM account ARN',
accountName: account.name, accountArn: account.arn,
filePath });
arnError = true;
return;
}
allKeys = allKeys.concat(account.keys);
});
if (arnError) {
return false;
}
const uniqueKeysRes = joi.validate(
allKeys, this._joiKeysValidator.unique('access'));
if (uniqueKeysRes.error) {
this._dumpJoiErrors(uniqueKeysRes.error.details, filePath);
return false;
}
return true;
}
_dumpJoiErrors(errors, filePath) {
errors.forEach(err => {
const logInfo = { item: err.path, filePath };
if (err.type === 'array.unique') {
logInfo.reason = `duplicate value '${err.context.path}'`;
logInfo.dupValue = err.context.value[err.context.path];
} else {
logInfo.reason = err.message;
logInfo.context = err.context;
}
this._log.error('authentication config validation error',
logInfo);
});
}
}
module.exports = AuthLoader;
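
A minimal usage sketch of the AuthLoader class removed above (the glob path is a hypothetical example):

const AuthLoader = require('./AuthLoader');

const loader = new AuthLoader();
loader.addFilesByGlob('/conf/authdata*.json');
if (!loader.validate()) {
    // the loader already logged the validation errors
    throw new Error('invalid authentication config');
}
const authData = loader.getData(); // { accounts: [...] }, or null if invalid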

View File

@@ -7,6 +7,10 @@ const calculateSigningKey = require('./vaultUtilities').calculateSigningKey;
const hashSignature = require('./vaultUtilities').hashSignature;
const Indexer = require('./Indexer');
function _buildArn(service, generalResource, specificResource) {
return `arn:aws:${service}:::${generalResource}/${specificResource}`;
}
function _formatResponse(userInfoToSend) {
return {
message: {
@@ -38,7 +42,7 @@ class Backend {
/** verifySignatureV2
* @param {string} stringToSign - string to sign built per AWS rules
* @param {string} signatureFromRequest - signature sent with request
* @param {string} accessKey - account accessKey
* @param {string} accessKey - user's accessKey
* @param {object} options - contains algorithm (SHA1 or SHA256)
* @param {function} callback - callback with either error or user info
* @return {function} calls callback
@@ -69,7 +73,7 @@ class Backend {
/** verifySignatureV4
* @param {string} stringToSign - string to sign built per AWS rules
* @param {string} signatureFromRequest - signature sent with request
* @param {string} accessKey - account accessKey
* @param {string} accessKey - user's accessKey
* @param {string} region - region specified in request credential
* @param {string} scopeDate - date specified in request credential
* @param {object} options - options to send to Vault
@@ -157,6 +161,55 @@ class Backend {
};
return cb(null, vaultReturnObject);
}
/**
* Mocks Vault's response to a policy evaluation request
* Since policies are not actually implemented in the in-memory backend,
* we allow users to proceed with the request.
* @param {object} requestContextParams - parameters needed to construct
* requestContext in Vault
* @param {object} requestContextParams.constantParams -
* params that have the
* same value for each requestContext to be constructed in Vault
* @param {object} requestContextParams.parameterize - params that have
* arrays as values since a requestContext needs to be constructed with
* each option in Vault
* @param {object[]} requestContextParams.parameterize.specificResource -
* specific resources parameterized as an array of objects containing
* properties `key` and optional `versionId`
* @param {string} userArn - arn of requesting user
* @param {object} log - log object
* @param {function} cb - callback with either error or an array
* of authorization results
* @returns {undefined}
* @callback called with (err, vaultReturnObject)
*/
checkPolicies(requestContextParams, userArn, log, cb) {
let results;
const parameterizeParams = requestContextParams.parameterize;
if (parameterizeParams && parameterizeParams.specificResource) {
// object is parameterized
results = parameterizeParams.specificResource.map(obj => ({
isAllowed: true,
arn: _buildArn(this.service, requestContextParams
.constantParams.generalResource, obj.key),
versionId: obj.versionId,
}));
} else {
results = [{
isAllowed: true,
arn: _buildArn(this.service, requestContextParams
.constantParams.generalResource, requestContextParams
.constantParams.specificResource),
}];
}
const vaultReturnObject = {
message: {
body: results,
},
};
return cb(null, vaultReturnObject);
}
}
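
To illustrate the checkPolicies() mock above, a hypothetical parameterized request and the results it yields (bucket and keys are made up, and the backend's service is assumed to be 's3'):

const requestContextParams = {
    constantParams: { generalResource: 'mybucket' },
    parameterize: {
        specificResource: [{ key: 'a.txt' }, { key: 'b.txt', versionId: 'v1' }],
    },
};
// each parameterized resource is allowed and receives its own ARN:
// { isAllowed: true, arn: 'arn:aws:s3:::mybucket/a.txt' }
// { isAllowed: true, arn: 'arn:aws:s3:::mybucket/b.txt', versionId: 'v1' }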
@@ -173,6 +226,9 @@ class S3AuthBackend extends Backend {
* @param {object[]=} authdata.accounts[].keys - array of key objects
* @param {string} authdata.accounts[].keys[].access - access key
* @param {string} authdata.accounts[].keys[].secret - secret key
* @param {object[]=} authdata.accounts[].users - array of user objects:
* note: same properties as account, except no canonical ID / SAS token
* @param {string=} authdata.accounts[].sasToken - Azure SAS token
* @return {undefined}
*/
constructor(authdata) {

View File

@@ -19,6 +19,9 @@ class Indexer {
* @param {object[]=} authdata.accounts[].keys - array of key objects
* @param {string} authdata.accounts[].keys[].access - access key
* @param {string} authdata.accounts[].keys[].secret - secret key
* @param {object[]=} authdata.accounts[].users - array of user objects:
* note: same properties as account, except no canonical ID / SAS token
* @param {string=} authdata.accounts[].sasToken - Azure SAS token
* @return {undefined}
*/
constructor(authdata) {
@@ -27,6 +30,10 @@ class Indexer {
accessKey: {},
email: {},
};
this.usersBy = {
accessKey: {},
email: {},
};
/*
* This may happen if the application is configured to use another
@@ -40,6 +47,23 @@
this._build(authdata);
}
_indexUser(account, user) {
const userData = {
arn: account.arn,
canonicalID: account.canonicalID,
shortid: account.shortid,
accountDisplayName: account.accountDisplayName,
IAMdisplayName: user.name,
email: user.email.toLowerCase(),
keys: [],
};
this.usersBy.email[userData.email] = userData;
user.keys.forEach(key => {
userData.keys.push(key);
this.usersBy.accessKey[key.access] = userData;
});
}
_indexAccount(account) {
const accountData = {
arn: account.arn,
@@ -57,6 +81,11 @@
this.accountsBy.accessKey[key.access] = accountData;
});
}
if (account.users !== undefined) {
account.users.forEach(user => {
this._indexUser(accountData, user);
});
}
}
_build(authdata) {
@@ -97,7 +126,10 @@
* @return {Object} entity.email - The entity's lowercased email
*/
getEntityByKey(key) {
return this.accountsBy.accessKey[key];
if (this.accountsBy.accessKey.hasOwnProperty(key)) {
return this.accountsBy.accessKey[key];
}
return this.usersBy.accessKey[key];
}
/**
@@ -118,6 +150,9 @@
*/
getEntityByEmail(email) {
const lowerCasedEmail = email.toLowerCase();
if (this.usersBy.email.hasOwnProperty(lowerCasedEmail)) {
return this.usersBy.email[lowerCasedEmail];
}
return this.accountsBy.email[lowerCasedEmail];
}
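
A sketch of the lookup precedence implemented above, assuming _build() indexes each account as shown and using made-up credentials; note that access-key lookups prefer accounts while email lookups prefer users:

const Indexer = require('./Indexer');

const indexer = new Indexer({ accounts: [{
    name: 'acct', email: 'acct@example.com',
    arn: 'arn:aws:iam::123456789012:root',
    canonicalID: 'abcd', shortid: '123456789012',
    keys: [{ access: 'ACCOUNTKEY', secret: 's1' }],
    users: [{
        name: 'user1', email: 'User1@example.com',
        keys: [{ access: 'USERKEY', secret: 's2' }],
    }],
}] });
indexer.getEntityByKey('USERKEY');             // user entry (accountsBy miss)
indexer.getEntityByEmail('user1@example.com'); // user entry (usersBy hit)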

View File

@@ -1,18 +1,194 @@
const AuthLoader = require('./AuthLoader');
const werelogs = require('werelogs');
function _incr(count) {
if (count !== undefined) {
return count + 1;
}
return 1;
}
/**
* @deprecated please use {@link AuthLoader} class instead
* This function ensures that the field `name` inside `container` is of the
* expected `type` inside `obj`. If any error is found, an entry is added into
* the error collector object.
*
* @param {object} data - the error collector object
* @param {string} container - the name of the entity that contains
* what we're checking
* @param {string} name - the name of the entity we're checking for
* @param {string} type - expected typename of the entity we're checking
* @param {object} obj - the object we're checking the fields of
* @return {boolean} true if the type is Ok and no error found
* false if an error was found and reported
*/
function _checkType(data, container, name, type, obj) {
if ((type === 'array' && !Array.isArray(obj[name]))
|| (type !== 'array' && typeof obj[name] !== type)) {
data.errors.push({
txt: 'property is not of the expected type',
obj: {
entity: container,
property: name,
type: typeof obj[name],
expectedType: type,
},
});
return false;
}
return true;
}
/**
* This function ensures that the field `name` exists inside `obj`, which is
* a `container`, and is of the expected `type`. If any error is found, an
* entry is added into the error collector object.
*
* @param {object} data - the error collector object
* @param {string} container - the name of the entity that contains
* what we're checking
* @param {string} name - the name of the entity we're checking for
* @param {string} type - expected typename of the entity we're checking
* @param {object} obj - the object we're checking the fields of
* @return {boolean} true if the field exists and type is Ok
* false if an error was found and reported
*/
function _checkExists(data, container, name, type, obj) {
if (obj[name] === undefined) {
data.errors.push({
txt: 'missing property in auth entity',
obj: {
entity: container,
property: name,
},
});
return false;
}
return _checkType(data, container, name, type, obj);
}
function _checkUser(data, userObj) {
if (_checkExists(data, 'User', 'arn', 'string', userObj)) {
// eslint-disable-next-line no-param-reassign
data.arns[userObj.arn] = _incr(data.arns[userObj.arn]);
}
if (_checkExists(data, 'User', 'email', 'string', userObj)) {
// eslint-disable-next-line no-param-reassign
data.emails[userObj.email] = _incr(data.emails[userObj.email]);
}
if (_checkExists(data, 'User', 'keys', 'array', userObj)) {
userObj.keys.forEach(keyObj => {
// eslint-disable-next-line no-param-reassign
data.keys[keyObj.access] = _incr(data.keys[keyObj.access]);
});
}
}
function _checkAccount(data, accountObj, checkSas) {
if (_checkExists(data, 'Account', 'email', 'string', accountObj)) {
// eslint-disable-next-line no-param-reassign
data.emails[accountObj.email] = _incr(data.emails[accountObj.email]);
}
if (_checkExists(data, 'Account', 'arn', 'string', accountObj)) {
// eslint-disable-next-line no-param-reassign
data.arns[accountObj.arn] = _incr(data.arns[accountObj.arn]);
}
if (_checkExists(data, 'Account', 'canonicalID', 'string', accountObj)) {
// eslint-disable-next-line no-param-reassign
data.canonicalIds[accountObj.canonicalID] =
_incr(data.canonicalIds[accountObj.canonicalID]);
}
if (checkSas &&
_checkExists(data, 'Account', 'sasToken', 'string', accountObj)) {
// eslint-disable-next-line no-param-reassign
data.sasTokens[accountObj.sasToken] =
_incr(data.sasTokens[accountObj.sasToken]);
}
if (accountObj.users) {
if (_checkType(data, 'Account', 'users', 'array', accountObj)) {
accountObj.users.forEach(userObj => _checkUser(data, userObj));
}
}
if (accountObj.keys) {
if (_checkType(data, 'Account', 'keys', 'array', accountObj)) {
accountObj.keys.forEach(keyObj => {
// eslint-disable-next-line no-param-reassign
data.keys[keyObj.access] = _incr(data.keys[keyObj.access]);
});
}
}
}
function _dumpCountError(property, obj, log) {
let count = 0;
Object.keys(obj).forEach(key => {
if (obj[key] > 1) {
log.error('property should be unique', {
property,
value: key,
count: obj[key],
});
++count;
}
});
return count;
}
function _dumpErrors(checkData, log) {
let nerr = _dumpCountError('CanonicalID', checkData.canonicalIds, log);
nerr += _dumpCountError('Email', checkData.emails, log);
nerr += _dumpCountError('ARN', checkData.arns, log);
nerr += _dumpCountError('AccessKey', checkData.keys, log);
nerr += _dumpCountError('SAS Token', checkData.sasTokens, log);
if (checkData.errors.length > 0) {
checkData.errors.forEach(msg => {
log.error(msg.txt, msg.obj);
});
}
if (checkData.errors.length === 0 && nerr === 0) {
return false;
}
log.fatal('invalid authentication config file (cannot start)');
return true;
}
/**
* @param {object} authdata - the authentication config file's data
* @param {werelogs.API} logApi - object providing a constructor function
* for the Logger object
* @param {(boolean|null)} checkSas - whether to check Azure SAS for each account
* @return {boolean} true on erroneous data
* false on success
*/
function validateAuthConfig(authdata, logApi) {
const authLoader = new AuthLoader(logApi);
authLoader.addAccounts(authdata);
return !authLoader.validate();
function validateAuthConfig(authdata, logApi, checkSas) {
const checkData = {
errors: [],
emails: [],
arns: [],
canonicalIds: [],
keys: [],
sasTokens: [],
};
const log = new (logApi || werelogs).Logger('S3');
if (authdata.accounts === undefined) {
checkData.errors.push({
txt: 'no "accounts" array defined in Auth config',
});
return _dumpErrors(checkData, log);
}
authdata.accounts.forEach(account => {
_checkAccount(checkData, account, checkSas);
});
return _dumpErrors(checkData, log);
}
module.exports = validateAuthConfig;
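
A minimal sketch of calling the standalone validator above (account values are made up; passing null for logApi falls back to werelogs):

const validateAuthConfig = require('./validateAuthConfig');

const authdata = {
    accounts: [{
        email: 'acct@example.com',
        arn: 'arn:aws:iam::123456789012:root',
        canonicalID: 'abcd1234',
        keys: [{ access: 'ACCESSKEY', secret: 'secretKey' }],
    }],
};
// returns true on erroneous data, false on success
const hasErrors = validateAuthConfig(authdata, null, false);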

View File

@@ -27,7 +27,7 @@ function check(request, log, data) {
milliseconds to compare to Date.now()
*/
const expirationTime = parseInt(data.Expires, 10) * 1000;
if (Number.isNaN(expirationTime)) {
if (isNaN(expirationTime)) {
log.debug('invalid expires parameter',
{ expires: data.Expires });
return { err: errors.MissingSecurityHeader };

View File

@@ -10,13 +10,17 @@ const createCanonicalRequest = require('./createCanonicalRequest');
* @returns {string} - stringToSign
*/
function constructStringToSign(params) {
const { request, signedHeaders, payloadChecksum, credentialScope, timestamp,
query, log, proxyPath } = params;
const path = proxyPath || request.path;
const request = params.request;
const signedHeaders = params.signedHeaders;
const payloadChecksum = params.payloadChecksum;
const credentialScope = params.credentialScope;
const timestamp = params.timestamp;
const query = params.query;
const log = params.log;
const canonicalReqResult = createCanonicalRequest({
pHttpVerb: request.method,
pResource: path,
pResource: request.path,
pQuery: query,
pHeaders: request.headers,
pSignedHeaders: signedHeaders,

View File

@@ -40,8 +40,7 @@ function createCanonicalRequest(params) {
// canonical query string
let canonicalQueryStr = '';
if (pQuery && !((service === 'iam' || service === 'ring' ||
service === 'sts') &&
if (pQuery && !((service === 'iam' || service === 'ring') &&
pHttpVerb === 'POST')) {
const sortedQueryParams = Object.keys(pQuery).sort().map(key => {
const encodedKey = awsURIencode(key);

View File

@@ -98,7 +98,7 @@ function check(request, log, data, awsService) {
log);
if (validationResult instanceof Error) {
log.debug('credentials in improper format', { credentialsArr,
timestamp, validationResult });
timestamp, validationResult });
return { err: validationResult };
}
// credentialsArr is [accessKey, date, region, aws-service, aws4_request]

View File

@@ -48,7 +48,7 @@ function check(request, log, data) {
log);
if (validationResult instanceof Error) {
log.debug('credentials in improper format', { credential,
timestamp, validationResult });
timestamp, validationResult });
return { err: validationResult };
}
const accessKey = credential[0];

View File

@@ -41,9 +41,8 @@ function validateCredentials(credentials, timestamp, log) {
{ scopeDate, timestampDate });
return errors.RequestTimeTooSkewed;
}
if (service !== 's3' && service !== 'iam' && service !== 'ring' &&
service !== 'sts') {
log.warn('service in credentials is not one of s3/iam/ring/sts', {
if (service !== 's3' && service !== 'iam' && service !== 'ring') {
log.warn('service in credentials is not one of s3/iam/ring', {
service,
});
return errors.InvalidArgument;

View File

@@ -20,7 +20,6 @@ module.exports = {
// no authentication information. Requestor can access
// only public resources
publicId: 'http://acs.amazonaws.com/groups/global/AllUsers',
zenkoServiceAccount: 'http://acs.zenko.io/accounts/service',
metadataFileNamespace: '/MDFile',
dataFileURL: '/DataFile',
// AWS states max size for user-defined metadata
@@ -30,41 +29,4 @@
// so we do the same.
maximumMetaHeadersSize: 2136,
emptyFileMd5: 'd41d8cd98f00b204e9800998ecf8427e',
// Version 2 changes the format of the data location property
// Version 3 adds the dataStoreName attribute
mdModelVersion: 3,
/*
* Splitter is used to build the object name for the overview of a
* multipart upload and to build the object names for each part of a
* multipart upload. These objects with large names are then stored in
* metadata in a "shadow bucket" to a real bucket. The shadow bucket
* contains all ongoing multipart uploads. We include in the object
* name some of the info we might need to pull about an open multipart
* upload or about an individual part with each piece of info separated
* by the splitter. We can then extract each piece of info by splitting
* the object name string with this splitter.
* For instance, assuming a splitter of '...!*!',
* the name of the upload overview would be:
* overview...!*!objectKey...!*!uploadId
* For instance, the name of a part would be:
* uploadId...!*!partNumber
*
* The sequence of characters used in the splitter should not occur
* elsewhere in the pieces of info to avoid splitting where not
* intended.
*
* Splitter is also used in adding bucket names to the
* namespaceusersbucket. The object names added to the
* namespaceusersbucket are of the form:
* canonicalID...!*!bucketname
*/
splitter: '..|..',
usersBucket: 'users..bucket',
// MPU Bucket Prefix is used to create the name of the shadow
// bucket used for multipart uploads. There is one shadow mpu
// bucket per bucket and its name is the mpuBucketPrefix followed
// by the name of the final destination bucket for the object
// once the multipart upload is complete.
mpuBucketPrefix: 'mpuShadowBucket',
};

View File

@@ -1,54 +0,0 @@
const Redis = require('ioredis');
class RedisClient {
/**
* @constructor
* @param {Object} config - config
* @param {string} config.host - Redis host
* @param {number} config.port - Redis port
* @param {string} config.password - Redis password
* @param {werelogs.Logger} logger - logger instance
*/
constructor(config, logger) {
this._client = new Redis(config);
this._client.on('error', err =>
logger.trace('error from redis', {
error: err,
method: 'RedisClient.constructor',
redisHost: config.host,
redisPort: config.port,
})
);
return this;
}
/**
* increment value of a key by 1 and set a ttl
* @param {string} key - key holding the value
* @param {number} expiry - expiry in seconds
* @param {callback} cb - callback
* @return {undefined}
*/
incrEx(key, expiry, cb) {
return this._client
.multi([['incr', key], ['expire', key, expiry]])
.exec(cb);
}
/**
* execute a batch of commands
* @param {string[]} cmds - list of commands
* @param {callback} cb - callback
* @return {undefined}
*/
batch(cmds, cb) {
return this._client.pipeline(cmds).exec(cb);
}
clear(cb) {
return this._client.flushdb(cb);
}
}
module.exports = RedisClient;
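
A short usage sketch of the deleted RedisClient (connection values are illustrative):

const werelogs = require('werelogs');
const RedisClient = require('./RedisClient');

const logger = new werelogs.Logger('Example');
const client = new RedisClient({ host: '127.0.0.1', port: 6379 }, logger);
// atomically INCR the key and refresh a 60-second TTL
client.incrEx('s3:requests:sample', 60, (err, res) => {
    // res is the MULTI result, e.g. [[null, 1], [null, 1]]
});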

View File

@@ -1,150 +0,0 @@
const async = require('async');
class StatsClient {
/**
* @constructor
* @param {object} redisClient - RedisClient instance
* @param {number} interval - sampling interval by seconds
* @param {number} expiry - sampling duration by seconds
*/
constructor(redisClient, interval, expiry) {
this._redis = redisClient;
this._interval = interval;
this._expiry = expiry;
return this;
}
/*
* Utility function to use when callback is undefined
*/
_noop() {}
/**
* normalize to the nearest interval
* @param {object} d - Date instance
* @return {number} timestamp - normalized to the nearest interval
*/
_normalizeTimestamp(d) {
const s = d.getSeconds();
return d.setSeconds(s - s % this._interval, 0);
}
/**
* set timestamp to the previous interval
* @param {object} d - Date instance
* @return {number} timestamp - set to the previous interval
*/
_setPrevInterval(d) {
return d.setSeconds(d.getSeconds() - this._interval);
}
/**
* build redis key to get total number of occurrences on the server
* @param {string} name - key name identifier
* @param {object} d - Date instance
* @return {string} key - key for redis
*/
_buildKey(name, d) {
return `${name}:${this._normalizeTimestamp(d)}`;
}
/**
* reduce the array of values to a single value
* typical input looks like [[null, '1'], [null, '2'], [null, null]...]
* @param {array} arr - array of [err, value] entries from a Redis batch
* @return {number} sum of the parsed values
*/
_getCount(arr) {
return arr.reduce((prev, a) => {
let num = parseInt(a[1], 10);
num = Number.isNaN(num) ? 0 : num;
return prev + num;
}, 0);
}
/**
* report/record a new request received on the server
* @param {string} id - service identifier
* @param {callback} cb - callback
* @return {undefined}
*/
reportNewRequest(id, cb) {
if (!this._redis) {
return undefined;
}
const callback = cb || this._noop;
const key = this._buildKey(`${id}:requests`, new Date());
return this._redis.incrEx(key, this._expiry, callback);
}
/**
* report/record a request that ended up being a 500 on the server
* @param {string} id - service identifier
* @param {callback} cb - callback
* @return {undefined}
*/
report500(id, cb) {
if (!this._redis) {
return undefined;
}
const callback = cb || this._noop;
const key = this._buildKey(`${id}:500s`, new Date());
return this._redis.incrEx(key, this._expiry, callback);
}
/**
* get stats for the last x seconds, x being the sampling duration
* @param {object} log - Werelogs request logger
* @param {string} id - service identifier
* @param {callback} cb - callback to call with the err/result
* @return {undefined}
*/
getStats(log, id, cb) {
if (!this._redis) {
return cb(null, {});
}
const d = new Date();
const totalKeys = Math.floor(this._expiry / this._interval);
const reqsKeys = [];
const req500sKeys = [];
for (let i = 0; i < totalKeys; i++) {
reqsKeys.push(['get', this._buildKey(`${id}:requests`, d)]);
req500sKeys.push(['get', this._buildKey(`${id}:500s`, d)]);
this._setPrevInterval(d);
}
return async.parallel([
next => this._redis.batch(reqsKeys, next),
next => this._redis.batch(req500sKeys, next),
], (err, results) => {
/**
* Batch result is of the format
* [ [null, '1'], [null, '2'], [null, '3'] ] where each
* item is the result of each batch command.
* For each item in the result, index 0 signifies the error and
* index 1 contains the result
*/
const statsRes = {
'requests': 0,
'500s': 0,
'sampleDuration': this._expiry,
};
if (err) {
log.error('error getting stats', {
error: err,
method: 'StatsClient.getStats',
});
/**
* Redis for stats is not a critical component; any error
* here is ignored, since returning an InternalError
* would be confused with the health of the service
*/
return cb(null, statsRes);
}
statsRes.requests = this._getCount(results[0]);
statsRes['500s'] = this._getCount(results[1]);
return cb(null, statsRes);
});
}
}
module.exports = StatsClient;
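
And a sketch wiring the deleted StatsClient to that RedisClient (the 5-second sampling interval and 30-second expiry are arbitrary choices):

const werelogs = require('werelogs');
const RedisClient = require('./RedisClient');
const StatsClient = require('./StatsClient');

const logger = new werelogs.Logger('Example');
const redisClient = new RedisClient({ host: '127.0.0.1', port: 6379 }, logger);
const stats = new StatsClient(redisClient, 5, 30);
stats.reportNewRequest('s3');
stats.getStats(logger, 's3', (err, res) => {
    // res: { 'requests': N, '500s': M, 'sampleDuration': 30 }
});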

View File

@@ -1,106 +0,0 @@
const errors = require('../errors');
const validServices = {
aws: ['s3', 'iam', 'sts', 'ring'],
scality: ['utapi', 'sso'],
};
class ARN {
/**
*
* Create an ARN object from its individual components
*
* @constructor
* @param {string} partition - ARN partition (e.g. 'aws')
* @param {string} service - service name in partition (e.g. 's3')
* @param {string} [region] - AWS region
* @param {string} [accountId] - AWS 12-digit account ID
* @param {string} resource - AWS resource path (e.g. 'foo/bar')
*/
constructor(partition, service, region, accountId, resource) {
this._partition = partition;
this._service = service;
this._region = region || null;
this._accountId = accountId || null;
this._resource = resource;
}
static createFromString(arnStr) {
const [arn, partition, service, region, accountId,
resourceType, resource] = arnStr.split(':');
if (arn !== 'arn') {
return { error: errors.InvalidArgument.customizeDescription(
'bad ARN: must start with "arn:"') };
}
if (!partition) {
return { error: errors.InvalidArgument.customizeDescription(
'bad ARN: must include a partition name, like "aws" in ' +
'"arn:aws:..."') };
}
if (!service) {
return { error: errors.InvalidArgument.customizeDescription(
'bad ARN: must include a service name, like "s3" in ' +
'"arn:aws:s3:..."') };
}
if (validServices[partition] === undefined) {
return { error: errors.InvalidArgument.customizeDescription(
`bad ARN: unknown partition "${partition}", should be a ` +
'valid partition name like "aws" in "arn:aws:..."') };
}
if (!validServices[partition].includes(service)) {
return { error: errors.InvalidArgument.customizeDescription(
`bad ARN: unsupported ${partition} service "${service}"`) };
}
if (accountId && !/^([0-9]{12}|[*])$/.test(accountId)) {
return { error: errors.InvalidArgument.customizeDescription(
`bad ARN: bad account ID "${accountId}": ` +
'must be a 12-digit number or "*"') };
}
const fullResource = (resource !== undefined ?
`${resourceType}:${resource}` : resourceType);
return new ARN(partition, service, region, accountId, fullResource);
}
getPartition() {
return this._partition;
}
getService() {
return this._service;
}
getRegion() {
return this._region;
}
getAccountId() {
return this._accountId;
}
getResource() {
return this._resource;
}
isIAMAccount() {
return this.getService() === 'iam'
&& this.getAccountId() !== null
&& this.getAccountId() !== '*'
&& this.getResource() === 'root';
}
isIAMUser() {
return this.getService() === 'iam'
&& this.getAccountId() !== null
&& this.getAccountId() !== '*'
&& this.getResource().startsWith('user/');
}
isIAMRole() {
return this.getService() === 'iam'
&& this.getAccountId() !== null
&& this.getResource().startsWith('role');
}
toString() {
return ['arn', this.getPartition(), this.getService(),
this.getRegion(), this.getAccountId(), this.getResource()]
.join(':');
}
}
module.exports = ARN;
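
A usage sketch for the deleted ARN model:

const ARN = require('./ARN');

const res = ARN.createFromString('arn:aws:iam::123456789012:root');
if (res.error) {
    // invalid input; res.error.description explains why
} else {
    res.isIAMAccount(); // true: iam service, 12-digit account ID, 'root'
    res.toString();     // 'arn:aws:iam::123456789012:root'
}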

View File

@@ -1,66 +1,23 @@
const constants = require('../constants');
const VersionIDUtils = require('../versioning/VersionID');
const ObjectMDLocation = require('./ObjectMDLocation');
// Version 2 changes the format of the data location property
// Version 3 adds the dataStoreName attribute
const modelVersion = 3;
/**
* Class to manage metadata object for regular s3 objects (instead of
* mpuPart metadata for example)
*/
class ObjectMD {
module.exports = class ObjectMD {
/**
* Create a new instance of ObjectMD. Parameter <tt>objMd</tt> is
* reserved for internal use, users should call
* {@link ObjectMD.createFromBlob()} to load from a stored
* metadata blob and check the returned value for errors.
*
* @constructor
* @param {ObjectMD|object} [objMd] - object metadata source,
* either an ObjectMD instance or a native JS object parsed from
* JSON
*/
constructor(objMd = undefined) {
this._initMd();
if (objMd !== undefined) {
if (objMd instanceof ObjectMD) {
this._updateFromObjectMD(objMd);
} else {
this._updateFromParsedJSON(objMd);
}
} else {
// set newly-created object md modified time to current time
this._data['last-modified'] = new Date().toJSON();
}
// set latest md model version now that we ensured
// backward-compat conversion
this._data['md-model-version'] = constants.mdModelVersion;
}
/**
* create an ObjectMD instance from stored metadata
*
* @param {String|Buffer} storedBlob - serialized metadata blob
* @return {object} a result object containing either a 'result'
* property whose value is a new ObjectMD instance on success, or
* an 'error' property on error
* @param {number} version - Version of the metadata model
*/
static createFromBlob(storedBlob) {
try {
const objMd = JSON.parse(storedBlob);
return { result: new ObjectMD(objMd) };
} catch (err) {
return { error: err };
}
}
getSerialized() {
return JSON.stringify(this.getValue());
}
_initMd() {
// initialize md with default values
constructor(version) {
const now = new Date().toJSON();
this._data = {
'md-model-version': version || modelVersion,
'owner-display-name': '',
'owner-id': '',
'cache-control': '',
@@ -69,6 +26,7 @@ class ObjectMD {
'expires': '',
'content-length': 0,
'content-type': '',
'last-modified': now,
'content-md5': '',
// simple/no version. will expand once object versioning is
// introduced
@@ -90,7 +48,7 @@ class ObjectMD {
READ_ACP: [],
},
'key': '',
'location': null,
'location': [],
'isNull': '',
'nullVersionId': '',
'isDeleteMarker': '',
@@ -102,37 +60,18 @@ class ObjectMD {
destination: '',
storageClass: '',
role: '',
storageType: '',
dataStoreVersionId: '',
},
'dataStoreName': '',
};
}
_updateFromObjectMD(objMd) {
// We only duplicate selected attributes here, where setters
// allow to change inner values, and let the others as shallow
// copies. Since performance is a concern, we want to avoid
// the JSON.parse(JSON.stringify()) method.
Object.assign(this._data, objMd._data);
Object.assign(this._data.replicationInfo,
objMd._data.replicationInfo);
}
_updateFromParsedJSON(objMd) {
// objMd is a new JS object created for the purpose, it's safe
// to just assign its top-level properties.
Object.assign(this._data, objMd);
this._convertToLatestModel();
}
_convertToLatestModel() {
// handle backward-compat stuff
if (typeof(this._data.location) === 'string') {
this.setLocation([{ key: this._data.location }]);
}
/**
* Returns metadata model version
*
* @return {number} Metadata model version
*/
getModelVersion() {
return this._data['md-model-version'];
}
/**
@@ -523,53 +462,21 @@ class ObjectMD {
/**
* Set location
*
* @param {object[]} location - array of data locations (see
* constructor of {@link ObjectMDLocation} for a description of
* fields for each array object)
* @param {string[]} location - location
* @return {ObjectMD} itself
*/
setLocation(location) {
if (!Array.isArray(location) || location.length === 0) {
this._data.location = null;
} else {
this._data.location = location;
}
this._data.location = location;
return this;
}
/**
* Returns location
*
* @return {object[]} location
* @return {string[]} location
*/
getLocation() {
const { location } = this._data;
return Array.isArray(location) ? location : [];
}
// Object metadata may contain multiple elements for a single part if
// the part was originally copied from another MPU. Here we reduce the
// locations array to a single element for each part.
getReducedLocations() {
const locations = this.getLocation();
const reducedLocations = [];
let partTotal = 0;
for (let i = 0; i < locations.length; i++) {
const currPart = new ObjectMDLocation(locations[i]);
const currPartNum = currPart.getPartNumber();
let nextPartNum = undefined;
if (i < locations.length - 1) {
const nextPart = new ObjectMDLocation(locations[i + 1]);
nextPartNum = nextPart.getPartNumber();
}
partTotal += currPart.getPartSize();
if (currPartNum !== nextPartNum) {
currPart.setPartSize(partTotal);
reducedLocations.push(currPart.getValue());
partTotal = 0;
}
}
return reducedLocations;
return this._data.location;
}
/**
@@ -652,16 +559,6 @@
return this._data.versionId;
}
/**
* Get metadata versionId value in encoded form (the one visible
* to the S3 API user)
*
* @return {string} The encoded object versionId
*/
getEncodedVersionId() {
return VersionIDUtils.encode(this.getVersionId());
}
/**
* Set tags
*
@@ -689,16 +586,14 @@
* @return {ObjectMD} itself
*/
setReplicationInfo(replicationInfo) {
const { status, content, destination, storageClass, role,
storageType, dataStoreVersionId } = replicationInfo;
const { status, content, destination, storageClass, role } =
replicationInfo;
this._data.replicationInfo = {
status,
content,
destination,
storageClass: storageClass || '',
role,
storageType: storageType || '',
dataStoreVersionId: dataStoreVersionId || '',
};
return this;
}
@@ -712,45 +607,6 @@
return this._data.replicationInfo;
}
setReplicationStatus(status) {
this._data.replicationInfo.status = status;
return this;
}
setReplicationDataStoreVersionId(versionId) {
this._data.replicationInfo.dataStoreVersionId = versionId;
return this;
}
getReplicationDataStoreVersionId() {
return this._data.replicationInfo.dataStoreVersionId;
}
getReplicationStatus() {
return this._data.replicationInfo.status;
}
getReplicationContent() {
return this._data.replicationInfo.content;
}
getReplicationRoles() {
return this._data.replicationInfo.role;
}
getReplicationStorageType() {
return this._data.replicationInfo.storageType;
}
getReplicationStorageClass() {
return this._data.replicationInfo.storageClass;
}
getReplicationTargetBucket() {
const destBucketArn = this._data.replicationInfo.destination;
return destBucketArn.split(':').slice(-1)[0];
}
/**
* Set dataStoreName
*
@@ -771,19 +627,6 @@
return this._data.dataStoreName;
}
/**
* Get dataStoreVersionId
*
* @return {string} external backend version id for data
*/
getDataStoreVersionId() {
const location = this.getLocation();
if (!location[0]) {
return undefined;
}
return location[0].dataStoreVersionId;
}
/**
* Set custom meta headers
*
@@ -822,6 +665,4 @@
getValue() {
return this._data;
}
}
module.exports = ObjectMD;
};
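
A sketch of the blob round-trip supported by the richer version of ObjectMD on the removal side of this comparison (location values are made up):

const ObjectMD = require('./ObjectMD');

const md = new ObjectMD();
md.setLocation([{ key: 'k1', start: 0, size: 512,
    dataStoreName: 'file', dataStoreETag: '1:abc' }]);
const blob = md.getSerialized();
const { result, error } = ObjectMD.createFromBlob(blob);
if (!error) {
    result.getLocation(); // same single-element location array
}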

View File

@@ -1,72 +0,0 @@
/**
* Helper class to ease access to a single data location in metadata
* 'location' array
*/
class ObjectMDLocation {
/**
* @constructor
* @param {object} locationObj - single data location info
* @param {string} locationObj.key - data backend key
* @param {number} locationObj.start - index of first data byte of
* this part in the full object
* @param {number} locationObj.size - byte length of data part
* @param {string} locationObj.dataStoreName - type of data store
* @param {string} locationObj.dataStoreETag - internal ETag of
* data part
*/
constructor(locationObj) {
this._data = {
key: locationObj.key,
start: locationObj.start,
size: locationObj.size,
dataStoreName: locationObj.dataStoreName,
dataStoreETag: locationObj.dataStoreETag,
};
}
getKey() {
return this._data.key;
}
getDataStoreName() {
return this._data.dataStoreName;
}
setDataLocation(location) {
this._data.key = location.key;
this._data.dataStoreName = location.dataStoreName;
return this;
}
getDataStoreETag() {
return this._data.dataStoreETag;
}
getPartNumber() {
return Number.parseInt(this._data.dataStoreETag.split(':')[0], 10);
}
getPartETag() {
return this._data.dataStoreETag.split(':')[1];
}
getPartStart() {
return this._data.start;
}
getPartSize() {
return this._data.size;
}
setPartSize(size) {
this._data.size = size;
return this;
}
getValue() {
return this._data;
}
}
module.exports = ObjectMDLocation;
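
A small sketch of the deleted helper, showing how part info is parsed out of the dataStoreETag (values are made up):

const ObjectMDLocation = require('./ObjectMDLocation');

const part = new ObjectMDLocation({
    key: 'd1', start: 0, size: 512,
    dataStoreName: 'file', dataStoreETag: '2:0123abc',
});
part.getPartNumber(); // 2, parsed from the dataStoreETag prefix
part.getPartETag();   // '0123abc'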

View File

@@ -58,8 +58,6 @@ class ReplicationConfiguration {
this._role = null;
this._destination = null;
this._rules = null;
this._prevStorageClass = null;
this._isExternalLocation = null;
}
/**
@@ -150,16 +148,11 @@
}
const role = parsedRole[0];
const rolesArr = role.split(',');
if (!this._isExternalLocation && rolesArr.length !== 2) {
if (rolesArr.length !== 2) {
return errors.InvalidArgument.customizeDescription(
'Invalid Role specified in replication configuration: ' +
'Role must be a comma-separated list of two IAM roles');
}
if (this._isExternalLocation && rolesArr.length > 1) {
return errors.InvalidArgument.customizeDescription(
'Invalid Role specified in replication configuration: ' +
'Role may not contain a comma separator');
}
const invalidRole = rolesArr.find(r => !this._isValidRoleARN(r));
if (invalidRole !== undefined) {
return errors.InvalidArgument.customizeDescription(
@@ -275,6 +268,22 @@
return undefined;
}
/**
* Check that the `StorageClass` is a valid class
* @param {string} storageClass - The storage class to validate
* @return {boolean} `true` if valid, otherwise `false`
*/
_isValidStorageClass(storageClass) {
if (!this._config) {
return validStorageClasses.includes(storageClass);
}
const replicationEndpoints = this._config.replicationEndpoints
.map(endpoint => endpoint.site);
return replicationEndpoints.includes(storageClass) ||
validStorageClasses.includes(storageClass);
}
/**
* Check that the `StorageClass` property is valid
* @param {object} destination - The destination object from this._parsedXML
@@ -283,28 +292,9 @@
_parseStorageClass(destination) {
const storageClass = destination.StorageClass &&
destination.StorageClass[0];
if (!this._config) {
return validStorageClasses.includes(storageClass);
}
const replicationEndpoints = this._config.replicationEndpoints
.map(endpoint => endpoint.site);
const locationConstraints =
Object.keys(this._config.locationConstraints);
if (locationConstraints.includes(storageClass)) {
if (this._prevStorageClass !== null &&
this._prevStorageClass !== storageClass) {
return errors.InvalidRequest.customizeDescription(
'The storage class must be same for all rules when ' +
'replicating objects to an external location');
}
this._isExternalLocation = true;
}
if (!replicationEndpoints.includes(storageClass) &&
!locationConstraints.includes(storageClass) &&
!validStorageClasses.includes(storageClass)) {
if (!this._isValidStorageClass(storageClass)) {
return errors.MalformedXML;
}
this._prevStorageClass = storageClass;
return undefined;
}
@@ -369,11 +359,11 @@
* @return {undefined}
*/
parseConfiguration() {
const err = this._parseRules();
const err = this._parseRole() || this._parseRules();
if (err) {
return err;
}
return this._parseRole();
return undefined;
}
/**

View File

@@ -34,6 +34,8 @@ class RoundRobin {
throw new Error(
'at least one host must be provided for round robin');
}
this.hostsList = hostsList.map(item => this._validateHostObj(item));
if (options && options.logger) {
this.logger = options.logger;
}
@@ -42,11 +44,6 @@
} else {
this.stickyCount = DEFAULT_STICKY_COUNT;
}
if (options && options.defaultPort) {
this.defaultPort = Number.parseInt(options.defaultPort, 10);
}
this.hostsList = hostsList.map(item => this._validateHostObj(item));
// TODO: add blacklisting capability
@@ -93,8 +90,7 @@
port: parsedPort,
};
}
return { host: hostItemObj.host,
port: this.defaultPort };
return { host: hostItemObj.host };
}
/**

View File

@@ -66,7 +66,7 @@ function getByteRangeFromSpec(rangeSpec, objectSize) {
return {};
}
return { range: [Math.max(objectSize - rangeSpec.suffix, 0),
objectSize - 1] };
objectSize - 1] };
}
if (rangeSpec.start < objectSize) {
// test is false if end is undefined
@@ -105,5 +105,5 @@
}
module.exports = { parseRangeSpec,
getByteRangeFromSpec,
parseRange };
getByteRangeFromSpec,
parseRange };
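
A worked example of the suffix branch above:

// 'bytes=-500' on a 1000-byte object gives rangeSpec = { suffix: 500 }:
// range = [Math.max(1000 - 500, 0), 1000 - 1] = [500, 999]
// a suffix larger than the object size clamps the start to 0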

View File

@@ -31,10 +31,10 @@ function sendError(res, log, error, optMessage) {
message = error.description || '';
}
log.debug('sending back error response', { httpCode: error.code,
errorType: error.message,
error: message });
errorType: error.message,
error: message });
res.end(`${JSON.stringify({ errorType: error.message,
errorMessage: message })}\n`);
errorMessage: message })}\n`);
}
/**
@@ -150,7 +150,7 @@ class RESTServer extends httpServer {
const reqUids = req.headers['x-scal-request-uids'];
const log = this.createLogger(reqUids);
log.debug('request received', { method: req.method,
url: req.url });
url: req.url });
if (req.method in this.reqMethods) {
this.reqMethods[req.method](req, res, log);
} else {
@@ -176,7 +176,7 @@
throw errors.MissingContentLength;
}
size = Number.parseInt(contentLength, 10);
if (Number.isNaN(size)) {
if (isNaN(size)) {
throw errors.InvalidInput.customizeDescription(
'bad Content-Length');
}

View File

@@ -55,7 +55,7 @@ class LevelDbClient extends rpc.BaseClient {
*/
openSub(subName) {
const subDbClient = new LevelDbClient({ url: this.url,
logger: this.logger });
logger: this.logger });
// make the same exposed RPC calls available from the sub-level object
Object.assign(subDbClient, this);
// listeners should not be duplicated on sublevel

View File

@@ -293,7 +293,7 @@ class BaseService {
exposedAPI.push({ name: callName });
});
return { apiVersion: this.apiVersion,
api: exposedAPI };
api: exposedAPI };
},
});
@@ -524,7 +524,7 @@
function sendHTTPError(res, err) {
res.writeHead(err.code || 500);
return res.end(`${JSON.stringify({ error: err.message,
message: err.description })}\n`);
message: err.description })}\n`);
}
/**
@@ -596,7 +596,7 @@ function objectStreamToJSON(rstream, wstream, cb) {
streamRPCJSONObj = function _streamRPCJSONObj(obj, wstream, cb) {
const cbOnce = jsutil.once(cb);
if (typeof obj === 'object') {
if (typeof(obj) === 'object') {
if (obj && obj.pipe !== undefined) {
// stream object streams as JSON arrays
return objectStreamToJSON(obj, wstream, cbOnce);
@@ -734,7 +734,7 @@ function RESTServer(params) {
* @return {undefined}
*/
httpServer.registerServices = function registerServices(...serviceList) {
this.serviceList.push(...serviceList);
this.serviceList.push.apply(this.serviceList, serviceList);
};
return httpServer;

View File

@@ -134,7 +134,8 @@ class SIOInputStream extends stream.Readable {
if (this._destroyed) {
return;
}
this._readState.pushBuffer.push(...data);
this._readState.pushBuffer.push.apply(this._readState.pushBuffer,
data);
if (this._readState.readable) {
this._pushData();
}
@@ -259,9 +260,9 @@ class SIOStreamSocket {
return arg;
}
const log = this.logger;
const isReadStream = (typeof arg.pipe === 'function'
const isReadStream = (typeof(arg.pipe) === 'function'
&& typeof (arg.read) === 'function');
let isWriteStream = (typeof arg.write === 'function');
let isWriteStream = (typeof(arg.write) === 'function');
if (isReadStream || isWriteStream) {
if (isReadStream && isWriteStream) {
@@ -302,7 +303,7 @@
}
return encodedStream;
}
if (typeof arg === 'object') {
if (typeof(arg) === 'object') {
let encodedObj;
if (Array.isArray(arg)) {
encodedObj = [];
@@ -376,7 +377,7 @@
});
return stream;
}
if (typeof arg === 'object') {
if (typeof(arg) === 'object') {
let decodedObj;
if (Array.isArray(arg)) {
decodedObj = [];
@@ -411,7 +412,7 @@
_error(streamId, error) {
this.logger.debug('emit \'stream-error\' event', { streamId, error });
this.socket.emit('stream-error', { streamId,
error: flattenError(error) });
error: flattenError(error) });
}
_hangup(streamId) {

View File

@@ -91,10 +91,6 @@ const _actionMapSSO = {
SsoAuthorize: 'sso:Authorize',
};
const _actionMapSTS = {
assumeRole: 'sts:AssumeRole',
};
function _findAction(service, method) {
if (service === 's3') {
return _actionMap[method];
@@ -112,9 +108,6 @@
// currently only method is ListMetrics
return `utapi:${method}`;
}
if (service === 'sts') {
return _actionMapSTS[method];
}
return undefined;
}
@@ -130,17 +123,13 @@ function _buildArn(service, generalResource, specificResource, requesterInfo) {
}
return 'arn:aws:s3:::';
}
if (service === 'iam' || service === 'sts') {
if (service === 'iam') {
// arn:aws:iam::<account-id>:<resource-type><resource>
let accountId = requesterInfo.accountid;
if (service === 'sts') {
accountId = requesterInfo.targetAccountId;
}
if (specificResource) {
return `arn:aws:iam::${accountId}:` +
return `arn:aws:iam::${requesterInfo.accountid}:` +
`${generalResource}${specificResource}`;
}
return `arn:aws:iam::${accountId}:${generalResource}`;
return `arn:aws:iam::${requesterInfo.accountid}:${generalResource}`;
}
if (service === 'ring') {
// arn:aws:iam::<account-id>:<resource-type><resource>
@@ -188,7 +177,6 @@ function _buildArn(service, generalResource, specificResource, requesterInfo) {
* @param {string} authType - type of authentication used
* @param {number} signatureAge - age of signature in milliseconds
* @param {string} securityToken - auth security token (temporary credentials)
* @param {string} policyArn - policy arn
* @return {RequestContext} a RequestContext instance
*/
@@ -196,8 +184,7 @@
constructor(headers, query, generalResource, specificResource,
requesterIp, sslEnabled, apiMethod,
awsService, locationConstraint, requesterInfo,
signatureVersion, authType, signatureAge, securityToken, policyArn,
action) {
signatureVersion, authType, signatureAge, securityToken) {
this._headers = headers;
this._query = query;
this._requesterIp = requesterIp;
@@ -223,8 +210,7 @@
this._authType = authType;
this._signatureAge = signatureAge;
this._securityToken = securityToken;
this._policyArn = policyArn;
this._action = action;
return this;
}
@@ -251,8 +237,6 @@
locationConstraint: this._locationConstraint,
tokenIssueTime: this._tokenIssueTime,
securityToken: this._securityToken,
policyArn: this._policyArn,
action: this._action,
};
return JSON.stringify(requestInfo);
}
@@ -273,8 +257,7 @@
obj.specificResource, obj.requesterIp, obj.sslEnabled,
obj.apiMethod, obj.awsService, obj.locationConstraint,
obj.requesterInfo, obj.signatureVersion,
obj.authType, obj.signatureAge, obj.securityToken, obj.policyArn,
obj.action);
obj.authType, obj.signatureAge, obj.securityToken);
}
/**
@@ -282,9 +265,6 @@
* @return {string} action
*/
getAction() {
if (this._action) {
return this._action;
}
if (this._foundAction) {
return this._foundAction;
}
@@ -375,26 +355,6 @@
return parseIp(this._requesterIp);
}
getRequesterAccountId() {
return this._requesterInfo.accountid;
}
getRequesterEndArn() {
return this._requesterInfo.arn;
}
getRequesterExternalId() {
return this._requesterInfo.externalId;
}
getRequesterPrincipalArn() {
return this._requesterInfo.parentArn || this._requesterInfo.arn;
}
getRequesterType() {
return this._requesterInfo.principalType;
}
/**
* Set sslEnabled
* @param {boolean} sslEnabled - true if https used
@@ -588,26 +548,6 @@
this._securityToken = token;
return this;
}
/**
* Get the policy arn
*
* @return {string} policyArn - Policy arn
*/
getPolicyArn() {
return this._policyArn;
}
/**
* Set the policy arn
*
* @param {string} policyArn - Policy arn
* @return {RequestContext} itself
*/
setPolicyArn(policyArn) {
this._policyArn = policyArn;
return this;
}
}
module.exports = RequestContext;

View File

@@ -38,7 +38,7 @@ function isResourceApplicable(requestContext, statementResource, log) {
// Pull just the relative id because there is no restriction that it
// does not contain ":"
const requestRelativeId = requestResourceArr.slice(5).join(':');
for (let i = 0; i < statementResource.length; i++) {
for (let i = 0; i < statementResource.length; i ++) {
// Handle variables (must handle BEFORE wildcards)
const policyResource =
substituteVariables(statementResource[i], requestContext);
@@ -73,7 +73,7 @@ function isActionApplicable(requestAction, statementAction, log) {
statementAction = [statementAction];
}
const length = statementAction.length;
for (let i = 0; i < length; i++) {
for (let i = 0; i < length; i ++) {
// No variables in actions so no need to handle
const regExStrOfStatementAction =
handleWildcards(statementAction[i]);
@@ -98,12 +98,12 @@
* @param {Object} log - logger
* @return {boolean} true if meet conditions, false if not
*/
evaluators.meetConditions = (requestContext, statementCondition, log) => {
function meetConditions(requestContext, statementCondition, log) {
// The Condition portion of a policy is an object with different
// operators as keys
const operators = Object.keys(statementCondition);
const length = operators.length;
for (let i = 0; i < length; i++) {
for (let i = 0; i < length; i ++) {
const operator = operators[i];
const hasIfExistsCondition = operator.endsWith('IfExists');
// If has "IfExists" added to operator name, find operator name
@@ -119,7 +119,8 @@ evaluators.meetConditions = (requestContext, statementCondition, log) => {
const conditionsWithSameOperator = statementCondition[operator];
const conditionKeys = Object.keys(conditionsWithSameOperator);
const conditionKeysLength = conditionKeys.length;
for (let j = 0; j < conditionKeysLength; j++) {
for (let j = 0; j < conditionKeysLength;
j ++) {
const key = conditionKeys[j];
let value = conditionsWithSameOperator[key];
if (!Array.isArray(value)) {
@@ -164,13 +165,13 @@
// are the only operators where wildcards are allowed
if (!operatorFunction(keyBasedOnRequestContext, value)) {
log.trace('did not satisfy condition', { operator: bareOperator,
keyBasedOnRequestContext, policyValue: value });
keyBasedOnRequestContext, policyValue: value });
return false;
}
}
}
return true;
};
}
/**
* Evaluate whether a request is permitted under a policy.
@ -221,8 +222,7 @@ evaluators.evaluatePolicy = (requestContext, policy, log) => {
continue;
}
// If do not meet conditions move on to next statement
if (currentStatement.Condition &&
!evaluators.meetConditions(requestContext,
if (currentStatement.Condition && !meetConditions(requestContext,
currentStatement.Condition, log)) {
continue;
}
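The hunks above move `meetConditions` between an exported property and a module-local function, but the evaluation itself is unchanged: every operator in the Condition block, and every condition key under each operator, must be satisfied, and evaluation fails closed as soon as one check misses. A simplified, self-contained sketch of that loop; the operator table and context object are stand-ins, not Arsenal's full implementation:

const operatorFunctions = {
    StringEquals: (requestValue, policyValues) =>
        policyValues.some(v => requestValue === v),
    NumericLessThan: (requestValue, policyValues) =>
        policyValues.some(v => Number(requestValue) < Number(v)),
};

function meetConditionsSketch(contextValues, statementCondition) {
    // operators are ANDed together, and so are the keys under each operator
    return Object.keys(statementCondition).every(operator => {
        const conditionsWithSameOperator = statementCondition[operator];
        return Object.keys(conditionsWithSameOperator).every(key => {
            const policyValues = [].concat(conditionsWithSameOperator[key]);
            const requestValue = contextValues[key];
            if (requestValue === undefined) {
                return false; // fail closed when the key is absent
            }
            return operatorFunctions[operator](requestValue, policyValues);
        });
    });
}

console.log(meetConditionsSketch(
    { 'aws:SourceIp': '10.0.0.1', 's3:max-keys': '50' },
    {
        StringEquals: { 'aws:SourceIp': ['10.0.0.1'] },
        NumericLessThan: { 's3:max-keys': '100' },
    })); // true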

View File

@ -1,176 +0,0 @@
const { meetConditions } = require('./evaluator');
/**
* Class with methods to manage the policy 'principal' validation
*/
class Principal {
/**
* Function to evaluate conditions if needed
*
* @param {object} params - Evaluation parameters
* @param {object} statement - Statement policy field
* @return {boolean} True if meet conditions
*/
static _evaluateCondition(params, statement) {
if (statement.Condition) {
return meetConditions(params.rc, statement.Condition, params.log);
}
return true;
}
/**
* Checks principal field against valid principals array
*
* @param {object} params - Evaluation parameters
* @param {object} statement - Statement policy field
* @param {object} valids - Valid principal fields
* @return {string} result of principal evaluation, either 'Neutral',
* 'Allow' or 'Deny'
*/
static _evaluatePrincipalField(params, statement, valids) {
const reverse = !!statement.NotPrincipal;
const principal = statement.Principal || statement.NotPrincipal;
if (typeof principal === 'string' && principal === '*') {
if (reverse) {
// In the case of an anonymous NotPrincipal, the result is Neutral for everyone
return 'Neutral';
}
if (!Principal._evaluateCondition(params, statement)) {
return 'Neutral';
}
return statement.Effect;
} else if (typeof principal === 'string') {
return 'Deny';
}
let ref = [];
let toCheck = [];
if (valids.Federated && principal.Federated) {
ref = valids.Federated;
toCheck = principal.Federated;
} else if (valids.AWS && principal.AWS) {
ref = valids.AWS;
toCheck = principal.AWS;
} else if (valids.Service && principal.Service) {
ref = valids.Service;
toCheck = principal.Service;
} else {
if (reverse) {
return statement.Effect;
}
return 'Neutral';
}
toCheck = Array.isArray(toCheck) ? toCheck : [toCheck];
ref = Array.isArray(ref) ? ref : [ref];
if (toCheck.indexOf('*') !== -1) {
if (reverse) {
return 'Neutral';
}
if (!Principal._evaluateCondition(params, statement)) {
return 'Neutral';
}
return statement.Effect;
}
const len = ref.length;
for (let i = 0; i < len; ++i) {
if (toCheck.indexOf(ref[i]) !== -1) {
if (reverse) {
return 'Neutral';
}
if (!Principal._evaluateCondition(params, statement)) {
return 'Neutral';
}
return statement.Effect;
}
}
if (reverse) {
return statement.Effect;
}
return 'Neutral';
}
/**
* Function to evaluate principal of statements against a valid principal
* array
*
* @param {object} params - Evaluation parameters
* @param {object} valids - Valid principal fields
* @return {string} result of principal evaluation, either 'Allow' or 'Deny'
*/
static _evaluatePrincipal(params, valids) {
const doc = params.trustedPolicy;
let statements = doc.Statement;
if (!Array.isArray(statements)) {
statements = [statements];
}
const len = statements.length;
let authorized = 'Deny';
for (let i = 0; i < len; ++i) {
const statement = statements[i];
const result = Principal._evaluatePrincipalField(params,
statement, valids);
if (result === 'Deny') {
return 'Deny';
} else if (result === 'Allow') {
authorized = 'Allow';
}
}
return authorized;
}
/**
* Function to evaluate principal for a policy
*
* @param {object} params - Evaluation parameters
* @return {object} {
* result: 'Allow' or 'Deny',
* checkAction: true or false,
* }
*/
static evaluatePrincipal(params) {
let valids = null;
let checkAction = false;
const account = params.rc.getRequesterAccountId();
const targetAccount = params.targetAccountId;
const accountArn = `arn:aws:iam::${account}:root`;
const requesterArn = params.rc.getRequesterPrincipalArn();
const requesterEndArn = params.rc.getRequesterEndArn();
const requesterType = params.rc.getRequesterType();
if (account !== targetAccount) {
valids = {
AWS: [
account,
accountArn,
],
};
checkAction = true;
} else {
if (requesterType === 'User' || requesterType === 'AssumedRole' ||
requesterType === 'Federated') {
valids = {
AWS: [
account,
accountArn,
],
};
if (requesterType === 'User' ||
requesterType === 'AssumedRole') {
valids.AWS.push(requesterArn);
if (requesterEndArn !== requesterArn) {
valids.AWS.push(requesterEndArn);
}
} else {
valids.Federated = [requesterArn];
}
} else if (requesterType === 'Service') {
valids = { Service: requesterArn };
}
}
const result = Principal._evaluatePrincipal(params, valids);
return {
result,
checkAction,
};
}
}
module.exports = Principal;
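The deleted `_evaluatePrincipal` above aggregates per-statement results with deny-overrides semantics: any 'Deny' ends the evaluation immediately, an 'Allow' is remembered, and 'Neutral' leaves the running default of 'Deny' untouched. A reduced sketch of just that aggregation step:

function aggregateResults(statementResults) {
    let authorized = 'Deny';
    for (let i = 0; i < statementResults.length; ++i) {
        if (statementResults[i] === 'Deny') {
            return 'Deny'; // an explicit Deny always wins
        }
        if (statementResults[i] === 'Allow') {
            authorized = 'Allow';
        }
        // 'Neutral' leaves the running result unchanged
    }
    return authorized;
}

console.log(aggregateResults(['Neutral', 'Allow', 'Neutral'])); // Allow
console.log(aggregateResults(['Allow', 'Deny'])); // Deny
console.log(aggregateResults(['Neutral'])); // Deny (the default)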

View File

@ -14,7 +14,8 @@ const handleWildcardInResource =
*/
function checkArnMatch(policyArn, requestRelativeId, requestArnArr,
caseSensitive) {
const regExofArn = handleWildcardInResource(policyArn);
let regExofArn = handleWildcardInResource(policyArn);
regExofArn = caseSensitive ? regExofArn : regExofArn.toLowerCase();
// The relativeId is the last part of the ARN (for instance, a bucket and
// object name in S3)
// Join on ":" in case there were ":" in the relativeID at the end
@ -30,7 +31,7 @@ function checkArnMatch(policyArn, requestRelativeId, requestArnArr,
}
// Check the other parts of the ARN to make sure they match. If not,
// return false.
for (let j = 0; j < 5; j++) {
for (let j = 0; j < 5; j ++) {
const segmentRegEx = new RegExp(regExofArn[j]);
const requestSegment = caseSensitive ? requestArnArr[j] :
requestArnArr[j].toLowerCase();
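`checkArnMatch` above tests the first five ':'-separated ARN segments one regex at a time, optionally lowercasing both sides for case-insensitive matching. A standalone sketch of the same idea; the wildcard expansion here is simplified, whereas Arsenal's `handleWildcardInResource` does its own escaping and splitting:

function segmentToRegExp(segment) {
    // escape regex metacharacters, then turn '*' and '?' into wildcards
    const escaped = segment.replace(/[.+^${}()|[\]\\]/g, '\\$&');
    return new RegExp(
        `^${escaped.replace(/\*/g, '.*').replace(/\?/g, '.')}$`);
}

function arnSegmentsMatch(policyArn, requestArn) {
    const policySegments = policyArn.split(':', 5);
    const requestSegments = requestArn.split(':', 5);
    for (let j = 0; j < 5; j++) {
        if (!segmentToRegExp(policySegments[j]).test(requestSegments[j])) {
            return false;
        }
    }
    return true;
}

console.log(arnSegmentsMatch('arn:aws:s3:*:*', 'arn:aws:s3::123456789012'));
// true: the wildcard region and account segments match anything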

View File

@ -144,8 +144,6 @@ conditions.findConditionKey = (key, requestContext) => {
// header
map.set('s3:ObjLocationConstraint',
headers['x-amz-meta-scal-location-constraint']);
map.set('sts:ExternalId', requestContext.getRequesterExternalId());
map.set('iam:PolicyArn', requestContext.getPolicyArn());
return map.get(key);
};
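`findConditionKey` above is essentially a Map from supported condition keys to values read off the request context, so dropping support for a key (here `sts:ExternalId` and `iam:PolicyArn`) is a one-line removal. A compact sketch of that lookup style, with illustrative keys and context fields:

function findConditionKeySketch(key, context) {
    const map = new Map();
    map.set('aws:CurrentTime', context.currentTime);
    map.set('aws:SourceIp', context.sourceIp);
    map.set('s3:prefix', context.query.prefix);
    return map.get(key); // undefined when the key is not supported
}

const context = {
    currentTime: '2017-06-01T00:00:00Z',
    sourceIp: '10.0.0.1',
    query: { prefix: 'photos/' },
};
console.log(findConditionKeySketch('s3:prefix', context)); // photos/
console.log(findConditionKeySketch('sts:ExternalId', context)); // undefined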

View File

@ -1,83 +0,0 @@
const EventEmitter = require('events');
/**
* Class to collect results of streaming subparts.
* Emits "done" event when streaming is complete and Azure has returned
* results for putting each of the subparts
* Emits "error" event if Azure returns an error for putting a subpart and
* streaming is in-progress
* @class ResultsCollector
*/
class ResultsCollector extends EventEmitter {
/**
* @constructor
*/
constructor() {
super();
this._results = [];
this._queue = 0;
this._streamingFinished = false;
}
/**
* ResultsCollector.pushResult - register result of putting one subpart
* and emit "done" or "error" events if appropriate
* @param {(Error|undefined)} err - error returned from Azure after
* putting a subpart
* @param {number} subPartIndex - the index of the subpart
* @emits ResultsCollector#done
* @emits ResultsCollector#error
* @return {undefined}
*/
pushResult(err, subPartIndex) {
this._results.push({
error: err,
subPartIndex,
});
this._queue--;
if (this._resultsComplete()) {
this.emit('done', err, this._results);
} else if (err) {
this.emit('error', err, subPartIndex);
}
}
/**
* ResultsCollector.pushOp - register operation to put another subpart
* @return {undefined}
*/
pushOp() {
this._queue++;
}
/**
* ResultsCollector.enableComplete - register streaming has finished,
* allowing the ResultsCollector#done event to be emitted when the last result
* has been returned
* @return {undefined}
*/
enableComplete() {
this._streamingFinished = true;
}
_resultsComplete() {
return (this._queue === 0 && this._streamingFinished);
}
}
/**
* "done" event
* @event ResultsCollector#done
* @type {(Error|undefined)} err - error returned by Azure when putting the last subpart
* @type {object[]} results - result for putting each of the subparts
* @property {Error} [results[].error] - error returned by Azure when putting a subpart
* @property {number} results[].subPartIndex - index of the subpart
*/
/**
* "error" event
* @event ResultsCollector#error
* @type {(Error|undefined)} error - error returned by Azure when putting the last subpart
* @type {number} subPartIndex - index of the subpart
*/
module.exports = ResultsCollector;
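A short usage sketch for the ResultsCollector class deleted above, assuming the pre-removal module path inside this repository: two subpart operations are registered, streaming is marked finished, and 'done' fires once both results have come back.

const ResultsCollector =
    require('./lib/s3middleware/azureHelpers/ResultsCollector');

const collector = new ResultsCollector();
collector.on('done', (err, results) => {
    // fires once streaming has finished and the queue is drained
    console.log('all subparts done', results.map(r => r.subPartIndex));
});
collector.on('error', (err, subPartIndex) =>
    console.log('subpart failed', subPartIndex, err.message));

collector.pushOp(); // subpart 0 in flight
collector.pushOp(); // subpart 1 in flight
collector.enableComplete(); // the source stream has ended
collector.pushResult(undefined, 0);
collector.pushResult(undefined, 1); // queue empty, so 'done' is emitted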

View File

@ -1,126 +0,0 @@
const stream = require('stream');
/**
* Interface for streaming subparts.
* @class SubStreamInterface
*/
class SubStreamInterface {
/**
* @constructor
* @param {stream.Readable} sourceStream - stream to read for data
*/
constructor(sourceStream) {
this._sourceStream = sourceStream;
this._totalLengthCounter = 0;
this._lengthCounter = 0;
this._subPartIndex = 0;
this._currentStream = new stream.PassThrough();
}
/**
* SubStreamInterface.pauseStreaming - pause data flow
* @return {undefined}
*/
pauseStreaming() {
this._sourceStream.pause();
}
/**
* SubStreamInterface.resumeStreaming - resume data flow
* @return {undefined}
*/
resumeStreaming() {
this._sourceStream.resume();
}
/**
* SubStreamInterface.endStreaming - signal end of data for last stream,
* to be called when source stream has ended
* @return {undefined}
*/
endStreaming() {
this._totalLengthCounter += this._lengthCounter;
this._currentStream.end();
}
/**
* SubStreamInterface.stopStreaming - destroy streams,
* to be called when streaming must be stopped externally
* @param {stream.Readable} [piper] - a stream that is piping data into
* source stream
* @return {undefined}
*/
stopStreaming(piper) {
if (piper) {
piper.unpipe();
piper.destroy();
}
this._sourceStream.destroy();
this._currentStream.destroy();
}
/**
* SubStreamInterface.getLengthCounter - return length of bytes streamed
* for current subpart
* @return {number} - this._lengthCounter
*/
getLengthCounter() {
return this._lengthCounter;
}
/**
* SubStreamInterface.getTotalBytesStreamed - return total bytes streamed
* @return {number} - this._totalLengthCounter
*/
getTotalBytesStreamed() {
return this._totalLengthCounter;
}
/**
* SubStreamInterface.getCurrentStream - return subpart stream currently
* being written to from source stream
* @return {stream.PassThrough} - this._currentStream
*/
getCurrentStream() {
return this._currentStream;
}
/**
* SubStreamInterface.transitionToNextStream - signal end of data for
* the current stream, generate a new stream and start streaming to the new stream
* @return {object} - return object containing new current stream and
* subpart index of current subpart
*/
transitionToNextStream() {
this.pauseStreaming();
this._currentStream.end();
this._totalLengthCounter += this._lengthCounter;
this._lengthCounter = 0;
this._subPartIndex++;
this._currentStream = new stream.PassThrough();
this.resumeStreaming();
return {
nextStream: this._currentStream,
subPartIndex: this._subPartIndex,
};
}
/**
* SubStreamInterface.write - write to the current stream
* @param {Buffer} chunk - a chunk of data
* @return {undefined}
*/
write(chunk) {
const ready = this._currentStream.write(chunk);
if (!ready) {
this.pauseStreaming();
this._currentStream.once('drain', () => {
this.resumeStreaming();
});
}
this._lengthCounter += chunk.length;
}
}
module.exports = SubStreamInterface;
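A usage sketch for the SubStreamInterface class deleted above, under the same pre-removal path assumption: chunks are written to the current subpart stream, and `transitionToNextStream()` closes it and hands back a fresh PassThrough along with the next subpart index.

const stream = require('stream');
const SubStreamInterface =
    require('./lib/s3middleware/azureHelpers/SubStreamInterface');

const source = new stream.PassThrough();
const iface = new SubStreamInterface(source);

iface.getCurrentStream().on('data', d =>
    console.log('subpart 0 got', d.length, 'bytes'));
iface.write(Buffer.alloc(1024));

const { nextStream, subPartIndex } = iface.transitionToNextStream();
nextStream.on('data', d =>
    console.log(`subpart ${subPartIndex} got`, d.length, 'bytes'));
iface.write(Buffer.alloc(512));
iface.endStreaming();
console.log(iface.getTotalBytesStreamed()); // 1536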

View File

@ -1,224 +0,0 @@
const crypto = require('crypto');
const stream = require('stream');
const ResultsCollector = require('./ResultsCollector');
const SubStreamInterface = require('./SubStreamInterface');
const objectUtils = require('../objectUtils');
const MD5Sum = require('../MD5Sum');
const errors = require('../../errors');
const azureMpuUtils = {};
azureMpuUtils.splitter = '|';
azureMpuUtils.overviewMpuKey = 'azure_mpu';
azureMpuUtils.maxSubPartSize = 104857600;
azureMpuUtils.zeroByteETag = crypto.createHash('md5').update('').digest('hex');
azureMpuUtils.padString = (str, category) => {
const _padFn = {
left: (str, padString) =>
`${padString}${str}`.substr(-padString.length),
right: (str, padString) =>
`${str}${padString}`.substr(0, padString.length),
};
// It's a little more performant if we add pre-generated strings for each
// type of padding we want to apply, instead of using string.repeat() to
// create the padding.
const padSpec = {
partNumber: {
padString: '00000',
direction: 'left',
},
subPart: {
padString: '00',
direction: 'left',
},
part: {
padString:
'%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%',
direction: 'right',
},
};
const { direction, padString } = padSpec[category];
return _padFn[direction](str, padString);
};
// NOTE: If we want to extract the object name from these keys, we will need
// to use a similar method to _getKeyAndUploadIdFromMpuKey since the object
// name may have instances of the splitter used to delimit arguments
azureMpuUtils.getMpuSummaryKey = (objectName, uploadId) =>
`${objectName}${azureMpuUtils.splitter}${uploadId}`;
azureMpuUtils.getBlockId = (uploadId, partNumber, subPartIndex) => {
const paddedPartNumber = azureMpuUtils.padString(partNumber, 'partNumber');
const paddedSubPart = azureMpuUtils.padString(subPartIndex, 'subPart');
const splitter = azureMpuUtils.splitter;
const blockId = `${uploadId}${splitter}partNumber${paddedPartNumber}` +
`${splitter}subPart${paddedSubPart}${splitter}`;
return azureMpuUtils.padString(blockId, 'part');
};
azureMpuUtils.getSummaryPartId = (partNumber, eTag, size) => {
const paddedPartNumber = azureMpuUtils.padString(partNumber, 'partNumber');
const timestamp = Date.now();
const splitter = azureMpuUtils.splitter;
const summaryKey = `${paddedPartNumber}${splitter}${timestamp}` +
`${splitter}${eTag}${splitter}${size}${splitter}`;
return azureMpuUtils.padString(summaryKey, 'part');
};
azureMpuUtils.getSubPartInfo = dataContentLength => {
const numberFullSubParts =
Math.floor(dataContentLength / azureMpuUtils.maxSubPartSize);
const remainder = dataContentLength % azureMpuUtils.maxSubPartSize;
const numberSubParts = remainder ?
numberFullSubParts + 1 : numberFullSubParts;
const lastPartSize = remainder || azureMpuUtils.maxSubPartSize;
return {
lastPartIndex: numberSubParts - 1,
lastPartSize,
};
};
azureMpuUtils.getSubPartSize = (subPartInfo, subPartIndex) => {
const { lastPartIndex, lastPartSize } = subPartInfo;
return subPartIndex === lastPartIndex ?
lastPartSize : azureMpuUtils.maxSubPartSize;
};
azureMpuUtils.getSubPartIds = (part, uploadId) =>
[...Array(part.numberSubParts).keys()].map(subPartIndex =>
azureMpuUtils.getBlockId(uploadId, part.partNumber, subPartIndex));
azureMpuUtils.putSinglePart = (errorWrapperFn, request, params, dataStoreName,
log, cb) => {
const { bucketName, partNumber, size, objectKey, contentMD5, uploadId }
= params;
const totalSubParts = 1;
const blockId = azureMpuUtils.getBlockId(uploadId, partNumber, 0);
const passThrough = new stream.PassThrough();
const options = {};
if (contentMD5) {
options.useTransactionalMD5 = true;
options.transactionalContentMD5 = contentMD5;
}
request.pipe(passThrough);
return errorWrapperFn('uploadPart', 'createBlockFromStream',
[blockId, bucketName, objectKey, passThrough, size, options,
(err, result) => {
if (err) {
log.error('Error from Azure data backend uploadPart',
{ error: err.message, dataStoreName });
if (err.code === 'ContainerNotFound') {
return cb(errors.NoSuchBucket);
}
if (err.code === 'InvalidMd5') {
return cb(errors.InvalidDigest);
}
if (err.code === 'Md5Mismatch') {
return cb(errors.BadDigest);
}
return cb(errors.InternalError.customizeDescription(
`Error returned from Azure: ${err.message}`)
);
}
const eTag = objectUtils.getHexMD5(result.headers['content-md5']);
return cb(null, eTag, totalSubParts, size);
}], log, cb);
};
azureMpuUtils.putNextSubPart = (errorWrapperFn, partParams, subPartInfo,
subPartStream, subPartIndex, resultsCollector, log, cb) => {
const { uploadId, partNumber, bucketName, objectKey } = partParams;
const subPartSize = azureMpuUtils.getSubPartSize(
subPartInfo, subPartIndex);
const subPartId = azureMpuUtils.getBlockId(uploadId, partNumber,
subPartIndex);
resultsCollector.pushOp();
errorWrapperFn('uploadPart', 'createBlockFromStream',
[subPartId, bucketName, objectKey, subPartStream, subPartSize,
{}, err => resultsCollector.pushResult(err, subPartIndex)], log, cb);
};
azureMpuUtils.putSubParts = (errorWrapperFn, request, params,
dataStoreName, log, cb) => {
const subPartInfo = azureMpuUtils.getSubPartInfo(params.size);
const resultsCollector = new ResultsCollector();
const hashedStream = new MD5Sum();
const streamInterface = new SubStreamInterface(hashedStream);
log.trace('data length is greater than max subpart size; ' +
'putting multiple parts');
resultsCollector.on('error', (err, subPartIndex) => {
streamInterface.stopStreaming(request);
log.error(`Error putting subpart to Azure: ${subPartIndex}`,
{ error: err.message, dataStoreName });
if (err.code === 'ContainerNotFound') {
return cb(errors.NoSuchBucket);
}
return cb(errors.InternalError.customizeDescription(
`Error returned from Azure: ${err}`));
});
resultsCollector.on('done', (err, results) => {
if (err) {
log.error('Error putting last subpart to Azure',
{ error: err.message, dataStoreName });
if (err.code === 'ContainerNotFound') {
return cb(errors.NoSuchBucket);
}
return cb(errors.InternalError.customizeDescription(
`Error returned from Azure: ${err}`));
}
const numberSubParts = results.length;
const totalLength = streamInterface.getTotalBytesStreamed();
log.trace('successfully put subparts to Azure',
{ numberSubParts, totalLength });
hashedStream.on('hashed', () => cb(null, hashedStream.completedHash,
numberSubParts, totalLength));
// in case the hashed event was already emitted before the
// event handler was registered:
if (hashedStream.completedHash) {
hashedStream.removeAllListeners('hashed');
return cb(null, hashedStream.completedHash, numberSubParts,
totalLength);
}
return undefined;
});
const currentStream = streamInterface.getCurrentStream();
// start first put to Azure before we start streaming the data
azureMpuUtils.putNextSubPart(errorWrapperFn, params, subPartInfo,
currentStream, 0, resultsCollector, log, cb);
request.pipe(hashedStream);
hashedStream.on('end', () => {
resultsCollector.enableComplete();
streamInterface.endStreaming();
});
hashedStream.on('data', data => {
const currentLength = streamInterface.getLengthCounter();
if (currentLength + data.length > azureMpuUtils.maxSubPartSize) {
const bytesToMaxSize = azureMpuUtils.maxSubPartSize - currentLength;
const firstChunk = bytesToMaxSize === 0 ? data :
data.slice(bytesToMaxSize);
if (bytesToMaxSize !== 0) {
// if we have not streamed a full subpart, write enough of the
// data chunk to stream the correct length
streamInterface.write(data.slice(0, bytesToMaxSize));
}
const { nextStream, subPartIndex } =
streamInterface.transitionToNextStream();
azureMpuUtils.putNextSubPart(errorWrapperFn, params, subPartInfo,
nextStream, subPartIndex, resultsCollector, log, cb);
streamInterface.write(firstChunk);
} else {
streamInterface.write(data);
}
});
};
module.exports = azureMpuUtils;
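A worked example of the subpart arithmetic above: with `maxSubPartSize` fixed at 104857600 bytes (100 MiB), a 260 MiB part splits into two full subparts plus a 60 MiB remainder, and `getSubPartSize` returns the short size only for the last index. Assuming the pre-removal module path:

const azureMpuUtils = require('./lib/s3middleware/azureHelpers/mpuUtils');

const partSize = 260 * 1024 * 1024; // 272629760 bytes
const info = azureMpuUtils.getSubPartInfo(partSize);
console.log(info); // { lastPartIndex: 2, lastPartSize: 62914560 }

for (let i = 0; i <= info.lastPartIndex; i++) {
    console.log(i, azureMpuUtils.getSubPartSize(info, i));
}
// 0 104857600
// 1 104857600
// 2 62914560 (the 60 MiB remainder)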

View File

@ -1,107 +0,0 @@
const querystring = require('querystring');
const escapeForXml = require('./escapeForXml');
const convertMethods = {};
convertMethods.completeMultipartUpload = xmlParams => {
const escapedBucketName = escapeForXml(xmlParams.bucketName);
return '<?xml version="1.0" encoding="UTF-8"?>' +
'<CompleteMultipartUploadResult ' +
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
`<Location>http://${escapedBucketName}.` +
`${escapeForXml(xmlParams.hostname)}/` +
`${escapeForXml(xmlParams.objectKey)}</Location>` +
`<Bucket>${escapedBucketName}</Bucket>` +
`<Key>${escapeForXml(xmlParams.objectKey)}</Key>` +
`<ETag>${escapeForXml(xmlParams.eTag)}</ETag>` +
'</CompleteMultipartUploadResult>';
};
convertMethods.initiateMultipartUpload = xmlParams =>
'<?xml version="1.0" encoding="UTF-8"?>' +
'<InitiateMultipartUploadResult ' +
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
`<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>` +
`<Key>${escapeForXml(xmlParams.objectKey)}</Key>` +
`<UploadId>${escapeForXml(xmlParams.uploadId)}</UploadId>` +
'</InitiateMultipartUploadResult>';
convertMethods.listMultipartUploads = xmlParams => {
const xml = [];
const l = xmlParams.list;
xml.push('<?xml version="1.0" encoding="UTF-8"?>',
'<ListMultipartUploadsResult ' +
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">',
`<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>`
);
// For certain XML elements, if the value is `undefined`, AWS returns either
// an empty tag or omits the element entirely. Hence the `optional` key in the params.
const params = [
{ tag: 'KeyMarker', value: xmlParams.keyMarker },
{ tag: 'UploadIdMarker', value: xmlParams.uploadIdMarker },
{ tag: 'NextKeyMarker', value: l.NextKeyMarker, optional: true },
{ tag: 'NextUploadIdMarker', value: l.NextUploadIdMarker,
optional: true },
{ tag: 'Delimiter', value: l.Delimiter, optional: true },
{ tag: 'Prefix', value: xmlParams.prefix, optional: true },
];
params.forEach(param => {
if (param.value) {
xml.push(`<${param.tag}>${escapeForXml(param.value)}` +
`</${param.tag}>`);
} else if (!param.optional) {
xml.push(`<${param.tag} />`);
}
});
xml.push(`<MaxUploads>${escapeForXml(l.MaxKeys)}</MaxUploads>`,
`<IsTruncated>${escapeForXml(l.IsTruncated)}</IsTruncated>`
);
l.Uploads.forEach(upload => {
const val = upload.value;
let key = upload.key;
if (xmlParams.encoding === 'url') {
key = querystring.escape(key);
}
xml.push('<Upload>',
`<Key>${escapeForXml(key)}</Key>`,
`<UploadId>${escapeForXml(val.UploadId)}</UploadId>`,
'<Initiator>',
`<ID>${escapeForXml(val.Initiator.ID)}</ID>`,
`<DisplayName>${escapeForXml(val.Initiator.DisplayName)}` +
'</DisplayName>',
'</Initiator>',
'<Owner>',
`<ID>${escapeForXml(val.Owner.ID)}</ID>`,
`<DisplayName>${escapeForXml(val.Owner.DisplayName)}` +
'</DisplayName>',
'</Owner>',
`<StorageClass>${escapeForXml(val.StorageClass)}` +
'</StorageClass>',
`<Initiated>${escapeForXml(val.Initiated)}</Initiated>`,
'</Upload>'
);
});
l.CommonPrefixes.forEach(prefix => {
xml.push('<CommonPrefixes>',
`<Prefix>${escapeForXml(prefix)}</Prefix>`,
'</CommonPrefixes>'
);
});
xml.push('</ListMultipartUploadsResult>');
return xml.join('');
};
function convertToXml(method, xmlParams) {
return convertMethods[method](xmlParams);
}
module.exports = convertToXml;
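A usage sketch for the `convertToXml` helper deleted above: the method name selects the generator, and every interpolated value passes through `escapeForXml`. Assuming the pre-removal module path:

const convertToXml = require('./lib/s3middleware/convertToXml');

const xml = convertToXml('initiateMultipartUpload', {
    bucketName: 'examplebucket',
    objectKey: 'photos/2017/cat.jpg',
    uploadId: 'VXBsb2FkIElE', // illustrative upload ID
});
console.log(xml);
// <?xml version="1.0" encoding="UTF-8"?><InitiateMultipartUploadResult ...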

View File

@ -1,9 +0,0 @@
const objectUtils = {};
objectUtils.getHexMD5 = base64MD5 =>
Buffer.from(base64MD5, 'base64').toString('hex');
objectUtils.getBase64MD5 = hexMD5 =>
Buffer.from(hexMD5, 'hex').toString('base64');
module.exports = objectUtils;
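The two helpers above are inverses, so round-tripping an MD5 digest through base64 and back returns the original hex string. Assuming the pre-removal module path:

const objectUtils = require('./lib/s3middleware/objectUtils');

const hexMD5 = 'd41d8cd98f00b204e9800998ecf8427e'; // MD5 of the empty string
const base64MD5 = objectUtils.getBase64MD5(hexMD5);
console.log(base64MD5); // 1B2M2Y8AsgTpgAmY7PhCfg==
console.log(objectUtils.getHexMD5(base64MD5) === hexMD5); // true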

View File

@ -186,7 +186,7 @@ function parseTagFromQuery(tagQuery) {
for (let i = 0; i < pairs.length; i++) {
const pair = pairs[i];
if (!pair) {
emptyTag++;
emptyTag ++;
continue;
}
const pairArray = pair.split('=');

View File

@ -45,7 +45,7 @@ function _checkModifiedSince(ifModifiedSinceTime, lastModified) {
if (ifModifiedSinceTime) {
res.present = true;
const checkWith = (new Date(ifModifiedSinceTime)).getTime();
if (Number.isNaN(Number(checkWith))) {
if (isNaN(checkWith)) {
res.error = errors.InvalidArgument;
} else if (lastModified <= checkWith) {
res.error = errors.NotModified;
@ -59,7 +59,7 @@ function _checkUnmodifiedSince(ifUnmodifiedSinceTime, lastModified) {
if (ifUnmodifiedSinceTime) {
res.present = true;
const checkWith = (new Date(ifUnmodifiedSinceTime)).getTime();
if (Number.isNaN(Number(checkWith))) {
if (isNaN(checkWith)) {
res.error = errors.InvalidArgument;
} else if (lastModified > checkWith) {
res.error = errors.PreconditionFailed;
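The hunks above swap the global `isNaN(checkWith)` for `Number.isNaN(Number(checkWith))`. Since `checkWith` comes from `Date.prototype.getTime()` it is already a number, so both forms agree here; the rewrite mainly avoids the coercing global, which silently converts its argument first. A short demonstration of the difference:

console.log(isNaN('not a date')); // true (string is coerced to NaN)
console.log(Number.isNaN('not a date')); // false (not the NaN value itself)
console.log(Number.isNaN(Number('not a date'))); // true (explicit coercion)

const checkWith = new Date('bogus').getTime(); // NaN
console.log(Number.isNaN(Number(checkWith))); // true, same as isNaN here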

View File

@ -35,12 +35,12 @@ function checkUnsupportedRoutes(reqMethod) {
function checkBucketAndKey(bucketName, objectKey, method, reqQuery,
blacklistedPrefixes, log) {
// bucketName should also be undefined, but is checked below anyway
const getServiceCall = (method === 'GET' && !objectKey);
// if empty name and request not a list Buckets or preflight request
if (!bucketName && !(getServiceCall || method === 'OPTIONS')) {
// if empty name and request not a list Buckets
if (!bucketName && !(method === 'GET' && !objectKey)) {
log.debug('empty bucket name', { method: 'routes' });
return errors.MethodNotAllowed;
return (method !== 'OPTIONS') ?
errors.MethodNotAllowed : errors.AccessForbidden
.customizeDescription('CORSResponse: Bucket not found');
}
if (bucketName !== undefined && routesUtils.isValidBucketName(bucketName,
blacklistedPrefixes.bucket) === false) {
@ -186,7 +186,7 @@ function routes(req, res, params, logger) {
if (statsClient) {
// report new request for stats
statsClient.reportNewRequest('s3');
statsClient.reportNewRequest();
}
try {

View File

@ -16,59 +16,61 @@ function routeDELETE(request, response, api, log, statsClient) {
return routesUtils.responseNoBody(err, corsHeaders, response,
204, log);
});
} else if (request.objectKey === undefined) {
if (request.query.website !== undefined) {
return api.callApiMethod('bucketDeleteWebsite', request,
response, log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
} else if (request.query.cors !== undefined) {
return api.callApiMethod('bucketDeleteCors', request, response,
log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
} else if (request.query.replication !== undefined) {
return api.callApiMethod('bucketDeleteReplication', request,
response, log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
}
api.callApiMethod('bucketDelete', request, response, log,
(err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders, response,
204, log);
});
} else {
if (request.query.tagging !== undefined) {
return api.callApiMethod('objectDeleteTagging', request,
response, log, (err, resHeaders) => {
if (request.objectKey === undefined) {
if (request.query.website !== undefined) {
return api.callApiMethod('bucketDeleteWebsite', request,
response, log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
} else if (request.query.cors !== undefined) {
return api.callApiMethod('bucketDeleteCors', request, response,
log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
} else if (request.query.replication !== undefined) {
return api.callApiMethod('bucketDeleteReplication', request,
response, log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
}
api.callApiMethod('bucketDelete', request, response, log,
(err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, resHeaders,
response, 204, log);
return routesUtils.responseNoBody(err, corsHeaders, response,
204, log);
});
} else {
if (request.query.tagging !== undefined) {
return api.callApiMethod('objectDeleteTagging', request,
response, log, (err, resHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, resHeaders,
response, 204, log);
});
}
api.callApiMethod('objectDelete', request, response, log,
(err, corsHeaders) => {
/*
* Since AWS expects a 204 regardless of the existence of
* the object, the errors NoSuchKey and NoSuchVersion should not
* be sent back as a response.
*/
if (err && !err.NoSuchKey && !err.NoSuchVersion) {
return routesUtils.responseNoBody(err, corsHeaders,
response, null, log);
}
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(null, corsHeaders, response,
204, log);
});
}
api.callApiMethod('objectDelete', request, response, log,
(err, corsHeaders) => {
/*
* Since AWS expects a 204 regardless of the existence of
* the object, the errors NoSuchKey and NoSuchVersion should not
* be sent back as a response.
*/
if (err && !err.NoSuchKey && !err.NoSuchVersion) {
return routesUtils.responseNoBody(err, corsHeaders,
response, null, log);
}
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(null, corsHeaders, response,
204, log);
});
}
return undefined;
}

View File

@ -8,11 +8,9 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
} else if (request.bucketName === undefined
&& request.objectKey === undefined) {
// GET service
api.callApiMethod('serviceGet', request, response, log,
(err, xml, corsHeaders) => {
api.callApiMethod('serviceGet', request, response, log, (err, xml) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseXMLBody(err, xml, response, log,
corsHeaders);
return routesUtils.responseXMLBody(err, xml, response, log);
});
} else if (request.objectKey === undefined) {
// GET bucket ACL
@ -76,7 +74,6 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
});
}
} else {
/* eslint-disable no-lonely-if */
if (request.query.acl !== undefined) {
// GET object ACL
api.callApiMethod('objectGetACL', request, response, log,
@ -116,7 +113,6 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
range, log);
});
}
/* eslint-enable */
}
}

View File

@ -10,8 +10,8 @@ function routePUT(request, response, api, log, statsClient) {
// content-length for object is handled separately below
const contentLength = request.headers['content-length'];
if ((contentLength && (Number.isNaN(Number(contentLength))
|| contentLength < 0)) || contentLength === '') {
if ((contentLength && (isNaN(contentLength) || contentLength < 0)) ||
contentLength === '') {
log.debug('invalid content-length header');
return routesUtils.responseNoBody(
errors.BadRequest, null, response, null, log);

View File

@ -19,7 +19,7 @@ function setCommonResponseHeaders(headers, response, log) {
} catch (e) {
log.debug('header can not be added ' +
'to the response', { header: headers[key],
error: e.stack, method: 'setCommonResponseHeaders' });
error: e.stack, method: 'setCommonResponseHeaders' });
}
}
});
@ -108,8 +108,8 @@ const XMLResponseBackend = {
});
setCommonResponseHeaders(corsHeaders, response, log);
response.writeHead(errCode.code,
{ 'Content-Type': 'application/xml',
'Content-Length': bytesSent });
{ 'Content-Type': 'application/xml',
'Content-Length': bytesSent });
return response.end(xmlStr, 'utf8', () => {
log.end().info('responded with error XML', {
httpCode: response.statusCode,
@ -169,8 +169,8 @@ const JSONResponseBackend = {
});
setCommonResponseHeaders(corsHeaders, response, log);
response.writeHead(errCode.code,
{ 'Content-Type': 'application/json',
'Content-Length': bytesSent });
{ 'Content-Type': 'application/json',
'Content-Length': bytesSent });
return response.end(jsonStr, 'utf8', () => {
log.end().info('responded with error JSON', {
httpCode: response.statusCode,
@ -310,8 +310,7 @@ function _contentLengthMatchesLocations(contentLength, dataLocations) {
(sum, location) => (sum !== undefined && location.size ?
sum + Number.parseInt(location.size, 10) :
undefined), 0);
return sumSizes === undefined ||
sumSizes === Number.parseInt(contentLength, 10);
return sumSizes === undefined || sumSizes === contentLength;
}
const routesUtils = {
@ -831,7 +830,6 @@ const routesUtils = {
*/
isValidBucketName(bucketname, prefixBlacklist) {
const ipAddressRegex = new RegExp(/^(\d+\.){3}\d+$/);
// eslint-disable-next-line no-useless-escape
const dnsRegex = new RegExp(/^[a-z0-9]+([\.\-]{1}[a-z0-9]+)*$/);
// Must be at least 3 and no more than 63 characters long.
if (bucketname.length < 3 || bucketname.length > 63) {
@ -880,7 +878,7 @@ const routesUtils = {
*/
statsReport500(err, statsClient) {
if (statsClient && err && err.code === 500) {
statsClient.report500('s3');
statsClient.report500();
}
return undefined;
},
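The `_contentLengthMatchesLocations` hunk above compares `sumSizes`, a number, against the Content-Length value, which arrives as a header string; one side of the diff parses it first. Strict equality never coerces, so the parse is what makes the comparison meaningful:

const contentLength = '100'; // headers arrive as strings
const sumSizes = 100; // summed from the numeric location sizes

console.log(sumSizes === contentLength); // false, number vs string
console.log(sumSizes === Number.parseInt(contentLength, 10)); // true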

View File

@ -139,8 +139,8 @@ class DataFileStore {
// disable autoClose so that we can close(fd) only after
// fsync() has been called
const fileStream = fs.createWriteStream(filePath,
{ fd,
autoClose: false });
{ fd,
autoClose: false });
fileStream.on('finish', () => {
function ok() {
@ -156,8 +156,8 @@ class DataFileStore {
fs.close(fd);
if (err) {
log.error('fsync error',
{ method: 'put', key, filePath,
error: err });
{ method: 'put', key, filePath,
error: err });
return cbOnce(
errors.InternalError.customizeDescription(
'filesystem error: fsync() returned ' +
@ -247,8 +247,8 @@ class DataFileStore {
return cbOnce(errors.ObjNotFound);
}
log.error('error retrieving file',
{ method: 'get', key, filePath,
error: err });
{ method: 'get', key, filePath,
error: err });
return cbOnce(
errors.InternalError.customizeDescription(
`filesystem read error: ${err.code}`));
@ -273,8 +273,8 @@ class DataFileStore {
return callback(errors.ObjNotFound);
}
log.error('error deleting file', { method: 'delete',
key, filePath,
error: err });
key, filePath,
error: err });
return callback(errors.InternalError.customizeDescription(
`filesystem error: unlink() returned ${err.code}`));
}

View File

@ -96,14 +96,14 @@ class LogConsumer {
this.logger.warn('raft session does not exist yet',
{ raftId: this.raftSession });
return cb(null, { info: { start: null,
end: null } });
end: null } });
}
if (err.code === 416) {
// requested range not satisfiable
this.logger.debug('no new log record to process',
{ raftId: this.raftSession });
return cb(null, { info: { start: null,
end: null } });
end: null } });
}
this.logger.error(
'Error handling record log request', { error: err });
@ -120,7 +120,7 @@ class LogConsumer {
logResponse.log.forEach(entry => recordStream.write(entry));
recordStream.end();
return cb(null, { info: logResponse.info,
log: recordStream });
log: recordStream });
}, this.logger.newRequestLogger());
}
}

View File

@ -91,7 +91,7 @@ class MetadataFileClient {
return done(err);
}
this.logger.info('connected to record log service', { url });
return done(null, logProxy);
return done();
});
return logProxy;
}

View File

@ -134,8 +134,8 @@ class MetadataFileServer {
db to clients */
this.server = new rpc.RPCServer(
{ logger: this.logger,
streamMaxPendingAck: this.streamMaxPendingAck,
streamAckTimeoutMs: this.streamAckTimeoutMs });
streamMaxPendingAck: this.streamMaxPendingAck,
streamAckTimeoutMs: this.streamAckTimeoutMs });
this.server.listen(this.port, this.bindAddress);
this.servers.push(this.server);
@ -147,7 +147,7 @@ class MetadataFileServer {
}
this.servers.forEach(server => {
server.registerServices(...this.services);
server.registerServices.apply(server, this.services);
});
this.genUUIDIfNotExists();
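The registerServices hunk above trades `Function.prototype.apply` for spread syntax; both forward the elements of an array as individual arguments. A tiny illustration with made-up service names:

function registerServices(...services) {
    console.log('registered', services.length, 'services');
}

const services = ['dbService', 'logService'];
registerServices.apply(null, services); // registered 2 services
registerServices(...services); // registered 2 services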

View File

@ -54,7 +54,7 @@ module.exports.setDirSyncFlag = function setDirSyncFlag(path, logger) {
if (doLog) {
if (error) {
logger.warn(warning, { error: error.message,
errorStack: error.stack });
errorStack: error.stack });
} else {
logger.warn(warning);
}

View File

@ -179,7 +179,6 @@ class TestMatrix {
this.listOfSpecialCase.forEach(specialCase => {
const keyCase = specialCase.key;
const result = Object.keys(keyCase).every(currentKey => {
// eslint-disable-next-line no-prototype-builtins
if (this.params.hasOwnProperty(currentKey) === false) {
return false;
}
@ -193,7 +192,7 @@ class TestMatrix {
*/
if (result === true) {
callFunction(this, matrixGenerated,
specialCase.callback, specialCase.description);
specialCase.callback, specialCase.description);
aSpecialCaseWasFound = true;
}
});

View File

@ -185,7 +185,7 @@ class Version {
}
function isMasterKey(key) {
return !key.includes(VID_SEP);
return ! key.includes(VID_SEP);
}

View File

@ -80,7 +80,7 @@ class VersioningRequestProcessor {
return callback(err);
}
// answer if value is not a place holder for deletion
if (!Version.isPHD(data)) {
if (! Version.isPHD(data)) {
return callback(null, data);
}
logger.debug('master version is a PHD, getting the latest version',
@ -133,16 +133,16 @@ class VersioningRequestProcessor {
logger.info('no other versions', { request });
this.dequeueGet(request, errors.ObjNotFound);
return this.repairMaster(request, logger,
{ type: 'del',
value: list[0].value });
{ type: 'del',
value: list[0].value });
}
// need repair
logger.info('update master by the latest version', { request });
const nextValue = list[1].value;
this.dequeueGet(request, null, nextValue);
return this.repairMaster(request, logger,
{ type: 'put', value: list[0].value,
nextValue });
{ type: 'put', value: list[0].value,
nextValue });
});
}
@ -352,7 +352,7 @@ class VersioningRequestProcessor {
// no versioning or versioning configuration off
if (!(options && options.versionId)) {
return this.writeCache.batch({ db,
array: [{ key, type: 'del' }] },
array: [{ key, type: 'del' }] },
logger, callback);
}
// version specific DELETE

View File

@ -3,7 +3,7 @@
"engines": {
"node": "6.9.5"
},
"version": "7.2.0",
"version": "7.0.1",
"description": "Common utilities for the S3 project components",
"main": "index.js",
"repository": {
@ -21,12 +21,9 @@
"async": "~2.1.5",
"debug": "~2.3.3",
"diskusage": "^0.2.2",
"ioredis": "2.4.0",
"ipaddr.js": "1.2.0",
"joi": "^10.6",
"level": "~1.6.0",
"level-sublevel": "~6.6.1",
"simple-glob": "^0.1",
"socket.io": "~1.7.3",
"socket.io-client": "~1.7.3",
"utf8": "2.1.2",

View File

@ -72,20 +72,4 @@ describe('AuthInfo class constructor', () => {
const publicUser = new AuthInfo({ canonicalID: constants.publicId });
assert.strictEqual(publicUser.isRequesterPublicUser(), true);
});
it('should have a working isRequesterAServiceAccount() method', () => {
assert.strictEqual(authInfo.isRequesterAServiceAccount(), false);
const serviceAccount = new AuthInfo({
canonicalID: `${constants.zenkoServiceAccount}/clueso` });
assert.strictEqual(serviceAccount.isRequesterAServiceAccount(), true);
});
it('should have a working isRequesterThisServiceAccount() method', () => {
const serviceAccount = new AuthInfo({
canonicalID: `${constants.zenkoServiceAccount}/clueso` });
assert.strictEqual(
serviceAccount.isRequesterThisServiceAccount('backbeat'), false);
assert.strictEqual(
serviceAccount.isRequesterThisServiceAccount('clueso'), true);
});
});

View File

@ -0,0 +1,47 @@
const assert = require('assert');
const Backend = require('../../../../lib/auth/auth').inMemory.backend.s3;
const DummyRequestLogger = require('../../helpers').DummyRequestLogger;
const authData = require('./sample_authdata');
const backend = new Backend(JSON.parse(JSON.stringify(authData)));
const counter = 10;
// eslint-disable-next-line arrow-body-style
const specificResource = [...Array(counter).keys()].map(i => {
return {
key: `key${i}`,
};
});
const generalResource = 'bucketName';
const requestContexts = {
constantParams: {
generalResource,
},
parameterize: {
specificResource,
},
};
const service = 's3';
const userArn = 'aws::iam:123456789012:root';
const log = new DummyRequestLogger();
// eslint-disable-next-line arrow-body-style
const expectedResults = specificResource.map(entry => {
return {
isAllowed: true,
arn: `arn:aws:${service}:::${generalResource}/${entry.key}`,
versionId: undefined,
};
});
describe('S3AuthBackend.checkPolicies', () => {
it('should mock successful results', done => {
backend.checkPolicies(requestContexts, userArn, log,
(err, vaultReturnObject) => {
assert.strictEqual(err, null, `Unexpected err: ${err}`);
assert.deepStrictEqual(vaultReturnObject, {
message: { body: expectedResults },
});
return done();
});
});
});

View File

@ -2,7 +2,7 @@ const assert = require('assert');
const Indexer = require('../../../../lib/auth/in_memory/Indexer');
const ref = require('./sample_authdata.json');
const { should } = require('./AuthLoader.spec');
const { should } = require('./validateAuthConfig');
describe('S3 AuthData Indexer', () => {
let obj = {};
@ -28,6 +28,15 @@ describe('S3 AuthData Indexer', () => {
done();
});
it('Should return user from email', done => {
const res = index.getEntityByEmail(obj.accounts[0].users[0].email);
assert.strictEqual(typeof res, 'object');
assert.strictEqual(res.arn, obj.accounts[0].arn);
assert.strictEqual(res.IAMdisplayName,
obj.accounts[0].users[0].name);
done();
});
it('Should return account from key', done => {
const res = index.getEntityByKey(obj.accounts[0].keys[0].access);
assert.strictEqual(typeof res, 'object');
@ -35,6 +44,16 @@ describe('S3 AuthData Indexer', () => {
done();
});
it('Should return user from key', done => {
const res = index.getEntityByKey(obj.accounts[0].users[0].keys[0]
.access);
assert.strictEqual(typeof res, 'object');
assert.strictEqual(res.arn, obj.accounts[0].arn);
assert.strictEqual(res.IAMdisplayName,
obj.accounts[0].users[0].name);
done();
});
it('should index account without keys', done => {
should._exec = () => {
index = new Indexer(obj);

View File

@ -2,22 +2,44 @@
"accounts": [{
"name": "Bart",
"email": "sampleaccount1@sampling.com",
"arn": "arn:aws:iam::123456789012:root",
"arn": "aws::iam:123456789012:root",
"canonicalID": "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be",
"shortid": "123456789012",
"keys": [{
"access": "accessKey1",
"secret": "verySecretKey1"
}]
}],
"users": [{
"name": "Bart Jr",
"email": "user1.sampleaccount2@sampling.com",
"arn": "aws::iam:123456789013:bart",
"keys": [{
"access": "USERBARTFUNACCESSKEY",
"secret": "verySecretKey1"
}]
}],
"sasToken": "test0"
}, {
"name": "Lisa",
"email": "sampleaccount2@sampling.com",
"arn": "arn:aws:iam::123456789013:root",
"arn": "aws::iam:accessKey2:user/Lisa",
"canonicalID": "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2bf",
"shortid": "123456789013",
"shortid": "123456789012",
"keys": [{
"access": "accessKey2",
"secret": "verySecretKey2"
}]
}],
"sasToken": "test1"
}, {
"name": "Docker",
"email": "sampleaccount3@sampling.com",
"arn": "aws::iam:accessKeyDocker:user/Docker",
"canonicalID": "sd359df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47eh3hd",
"shortid": "123456789012",
"keys": [{
"access": "accessKeyDocker",
"secret": "verySecretKeyDocker"
}],
"sasToken": "test2"
}]
}

View File

@ -2,12 +2,13 @@
"accounts": [{
"name": "Zenko",
"email": "sampleaccount4@sampling.com",
"arn": "aws::iam:123456789015:root",
"arn": "aws::iam:accessKeyZenko:user/Zenko",
"canonicalID": "newCanId",
"shortid": "123456789015",
"shortid": "123456789012",
"keys": [{
"access": "accessKeyZenko",
"secret": "verySecretKeyZenko"
}]
}],
"sasToken": "test2"
}]
}

View File

@ -1,7 +1,8 @@
const assert = require('assert');
const werelogs = require('werelogs');
const AuthLoader = require('../../../../lib/auth/auth').inMemory.AuthLoader;
const validateAuthConfig
= require('../../../../lib/auth/auth').inMemory.validateAuthConfig;
const ref = require('./sample_authdata.json');
werelogs.configure({
@ -15,7 +16,7 @@ function getParentField(obj, field) {
for (let i = 0; i < fields.length - 1; ++i) {
const cur = fields[i];
const n = Number(cur, 10);
if (Number.isNaN(n)) {
if (isNaN(n)) {
parent = parent[cur];
} else {
parent = parent[n];
@ -28,19 +29,15 @@ function getFieldName(field) {
return field.split('.').pop();
}
function shouldFail(obj, done) {
const authLoader = new AuthLoader(werelogs);
authLoader.addAccounts(obj);
const res = authLoader.validate();
assert.strictEqual(res, false);
function shouldFail(obj, checkSas, done) {
const res = validateAuthConfig(obj, werelogs, checkSas);
assert.strictEqual(res, true);
done();
}
function shouldSucceed(obj, done) {
const authLoader = new AuthLoader(werelogs);
authLoader.addAccounts(obj);
const res = authLoader.validate();
assert.strictEqual(res, true);
function shouldSuccess(obj, checkSas, done) {
const res = validateAuthConfig(obj, werelogs, checkSas);
assert.strictEqual(res, false);
done();
}
@ -48,15 +45,15 @@ const should = {
_exec: undefined,
missingField: (obj, field, done) => {
delete getParentField(obj, field)[getFieldName(field)];
should._exec(obj, done);
should._exec(obj, true, done);
},
modifiedField: (obj, field, value, done) => {
getParentField(obj, field)[getFieldName(field)] = value;
should._exec(obj, done);
should._exec(obj, true, done);
},
};
describe('AuthLoader class', () => {
describe('S3 AuthData Checker', () => {
let obj = {};
beforeEach(done => {
@ -74,10 +71,18 @@ describe('AuthLoader class', () => {
['accounts.0.email', 64],
['accounts.0.arn', undefined],
['accounts.0.arn', 64],
['accounts.0.sasToken', undefined],
['accounts.0.sasToken', 64],
['accounts.0.canonicalID', undefined],
['accounts.0.canonicalID', 64],
['accounts.0.users', 'not an object'],
['accounts.0.users.0.arn', undefined],
['accounts.0.users.0.arn', 64],
['accounts.0.users.0.email', undefined],
['accounts.0.users.0.email', 64],
['accounts.0.users.0.keys', undefined],
['accounts.0.users.0.keys', 'not an Array'],
['accounts.0.keys', 'not an Array'],
['accounts.0.keys', undefined],
].forEach(test => {
if (test[1] === undefined) {
// Check a failure when deleting required fields
@ -88,8 +93,7 @@ describe('AuthLoader class', () => {
} else {
// Check a failure when the type of field is different than
// expected
it(`should fail when modified field ${test[0]} ${test[1]}`,
done => {
it(`should fail when modified field ${test[0]}${test[1]}`, done => {
should._exec = shouldFail;
should.modifiedField(obj, test[0], test[1], done);
});
@ -105,30 +109,52 @@ describe('AuthLoader class', () => {
'accounts.0.users',
].forEach(test => {
// Check a success when deleting optional fields
it(`should return success when missing field ${test}`, done => {
should._exec = shouldSucceed;
it(`should success when missing field ${test[0]}`, done => {
should._exec = shouldSuccess;
should.missingField(obj, test[0], done);
});
});
it('Should return error on two same canonicalID', done => {
obj.accounts[0].canonicalID = obj.accounts[1].canonicalID;
shouldFail(obj, done);
it('Should return success if no sasToken and checkSas false', done => {
obj.accounts[0].sasToken = undefined;
shouldSuccess(obj, false, done);
});
it('Should return error on two same emails', done => {
it('Should return error on two same sasTokens and checkSas true', done => {
obj.accounts[0].sasToken = obj.accounts[1].sasToken;
shouldFail(obj, true, done);
});
it('Should return success on two same sasTokens and checkSas false',
done => {
obj.accounts[0].sasToken = obj.accounts[1].sasToken;
shouldSuccess(obj, false, done);
});
it('Should return error on two same canonicalID', done => {
obj.accounts[0].canonicalID = obj.accounts[1].canonicalID;
shouldFail(obj, null, done);
});
it('Should return error on two same emails, account-account', done => {
obj.accounts[0].email = obj.accounts[1].email;
shouldFail(obj, done);
shouldFail(obj, null, done);
});
it('Should return error on two same emails account-user', done => {
obj.accounts[0].users[0].email = obj.accounts[1].email;
shouldFail(obj, null, done);
});
it('Should return error on two same arn', done => {
obj.accounts[0].arn = obj.accounts[1].arn;
shouldFail(obj, done);
obj.accounts[0].arn = obj.accounts[0].users[0].arn;
shouldFail(obj, null, done);
});
it('Should return error on two same access key', done => {
obj.accounts[0].keys[0].access = obj.accounts[1].keys[0].access;
shouldFail(obj, done);
obj.accounts[0].keys[0].access =
obj.accounts[0].users[0].keys[0].access;
shouldFail(obj, null, done);
});
});

View File

@ -7,131 +7,119 @@ const constructStringToSign =
const DummyRequestLogger = require('../../helpers').DummyRequestLogger;
const log = new DummyRequestLogger();
[
{ path: '', desc: 'constructStringToSign function' },
{ path: '/_/proxy', desc: 'constructStringToSign function with proxy' },
].forEach(item => {
describe(item.desc, () => {
// Example taken from: http://docs.aws.amazon.com/AmazonS3/
// latest/API/sig-v4-header-based-auth.html
it('should construct a stringToSign in accordance ' +
'with AWS rules for a get object request (header auth)', () => {
const path = '/test.txt';
const params = {
request: {
method: 'GET',
path: `${item.path}${path}`,
headers: {
'host': 'examplebucket.s3.amazonaws.com',
'x-amz-date': '20130524T000000Z',
'authorization': 'AWS4-HMAC-SHA256 Credential' +
'=AKIAIOSFODNN7EXAMPLE/20130524/us-east-1/' +
's3/aws4_request,SignedHeaders=host;range;' +
'x-amz-content-sha256;x-amz-date,Signature=' +
'f0e8bdb87c964420e857bd35b5d6ed310bd44f' +
'0170aba48dd91039c6036bdb41',
'range': 'bytes=0-9',
'x-amz-content-sha256': 'e3b0c44298fc1c149afbf4c' +
'8996fb92427ae41e4649b934ca495991b7852b855',
},
},
query: {},
signedHeaders: 'host;range;x-amz-content-sha256;x-amz-date',
payloadChecksum: 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4' +
'649b934ca495991b7852b855',
credentialScope: '20130524/us-east-1/s3/aws4_request',
timestamp: '20130524T000000Z',
log,
proxyPath: item.path ? path : undefined,
};
const expectedOutput = 'AWS4-HMAC-SHA256\n' +
'20130524T000000Z\n' +
'20130524/us-east-1/s3/aws4_request\n' +
'7344ae5b7ee6c3e7e6b0fe0640412a37625d1fbfff95c48bbb2dc439649' +
'46972';
const actualOutput = constructStringToSign(params);
assert.strictEqual(actualOutput, expectedOutput);
});
// Example taken from: http://docs.aws.amazon.com/AmazonS3/
// latest/API/sig-v4-header-based-auth.html
it('should construct a stringToSign in accordance ' +
'with AWS rules for a put object request (header auth)', () => {
const path = '/test$file.text';
const params = {
request: {
method: 'PUT',
path: `${item.path}${path}`,
headers: {
'date': 'Fri, 24 May 2013 00:00:00 GMT',
'host': 'examplebucket.s3.amazonaws.com',
'x-amz-date': '20130524T000000Z',
'authorization': 'AWS4-HMAC-SHA256 Credential' +
'=AKIAIOSFODNN7EXAMPLE/20130524/us-east-1' +
'/s3/aws4_request,SignedHeaders=date;host;' +
'x-amz-content-sha256;x-amz-date;x-amz-storage' +
'-class,Signature=98ad721746da40c64f1a55b78f14c2' +
'38d841ea1380cd77a1b5971af0ece108bd',
'x-amz-storage-class': 'REDUCED_REDUNDANCY',
'x-amz-content-sha256': '44ce7dd67c959e0d3524ffac1' +
'771dfbba87d2b6b4b4e99e42034a8b803f8b072',
},
describe('constructStringToSign function', () => {
// Example taken from: http://docs.aws.amazon.com/AmazonS3/
// latest/API/sig-v4-header-based-auth.html
it('should construct a stringToSign in accordance ' +
'with AWS rules for a get object request (header auth)', () => {
const params = {
request: {
method: 'GET',
path: '/test.txt',
headers: {
'host': 'examplebucket.s3.amazonaws.com',
'x-amz-date': '20130524T000000Z',
'authorization': 'AWS4-HMAC-SHA256 Credential' +
'=AKIAIOSFODNN7EXAMPLE/20130524/us-east-1/' +
's3/aws4_request,SignedHeaders=host;range;' +
'x-amz-content-sha256;x-amz-date,Signature=' +
'f0e8bdb87c964420e857bd35b5d6ed310bd44f' +
'0170aba48dd91039c6036bdb41',
'range': 'bytes=0-9',
'x-amz-content-sha256': 'e3b0c44298fc1c149afbf4c' +
'8996fb92427ae41e4649b934ca495991b7852b855',
},
query: {},
signedHeaders: 'date;host;x-amz-content-sha256;' +
'x-amz-date;x-amz-storage-class',
payloadChecksum: '44ce7dd67c959e0d3524ffac1771dfbba8' +
'7d2b6b4b4e99e42034a8b803f8b072',
credentialScope: '20130524/us-east-1/s3/aws4_request',
timestamp: '20130524T000000Z',
log,
proxyPath: item.path ? path : undefined,
};
const expectedOutput = 'AWS4-HMAC-SHA256\n' +
'20130524T000000Z\n' +
'20130524/us-east-1/s3/aws4_request\n' +
'9e0e90d9c76de8fa5b200d8c849cd5b8dc7a3' +
'be3951ddb7f6a76b4158342019d';
const actualOutput = constructStringToSign(params);
assert.strictEqual(actualOutput, expectedOutput);
});
},
query: {},
signedHeaders: 'host;range;x-amz-content-sha256;x-amz-date',
payloadChecksum: 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4' +
'649b934ca495991b7852b855',
credentialScope: '20130524/us-east-1/s3/aws4_request',
timestamp: '20130524T000000Z',
log,
};
const expectedOutput = 'AWS4-HMAC-SHA256\n' +
'20130524T000000Z\n' +
'20130524/us-east-1/s3/aws4_request\n' +
'7344ae5b7ee6c3e7e6b0fe0640412a37625d1fbfff95c48bbb2dc43964946972';
const actualOutput = constructStringToSign(params);
assert.strictEqual(actualOutput, expectedOutput);
});
// Example taken from: http://docs.aws.amazon.com/AmazonS3/
// latest/API/sig-v4-header-based-auth.html
it('should construct a stringToSign in accordance ' +
'with AWS rules for a pre-signed get url request (query auth)',
() => {
const path = '/test.txt';
const params = {
request: {
method: 'GET',
path: `${item.path}${path}`,
headers: {
host: 'examplebucket.s3.amazonaws.com',
},
},
query: {
'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
'X-Amz-Credential': 'AKIAIOSFODNN7EXAMPLE/20130524/' +
'us-east-1/s3/aws4_request',
'X-Amz-Date': '20130524T000000Z',
'X-Amz-Expires': '86400',
'X-Amz-SignedHeaders': 'host',
},
signedHeaders: 'host',
payloadChecksum: 'UNSIGNED-PAYLOAD',
credentialScope: '20130524/us-east-1/s3/aws4_request',
timestamp: '20130524T000000Z',
log,
proxyPath: item.path ? path : undefined,
};
const expectedOutput = 'AWS4-HMAC-SHA256\n' +
'20130524T000000Z\n' +
'20130524/us-east-1/s3/aws4_request\n' +
'3bfa292879f6447bbcda7001decf97f4a54d' +
'c650c8942174ae0a9121cf58ad04';
const actualOutput = constructStringToSign(params);
assert.strictEqual(actualOutput, expectedOutput);
});
// Example taken from: http://docs.aws.amazon.com/AmazonS3/
// latest/API/sig-v4-header-based-auth.html
it('should construct a stringToSign in accordance ' +
'with AWS rules for a put object request (header auth)', () => {
const params = {
request: {
method: 'PUT',
path: '/test$file.text',
headers: {
'date': 'Fri, 24 May 2013 00:00:00 GMT',
'host': 'examplebucket.s3.amazonaws.com',
'x-amz-date': '20130524T000000Z',
'authorization': 'AWS4-HMAC-SHA256 Credential' +
'=AKIAIOSFODNN7EXAMPLE/20130524/us-east-1' +
'/s3/aws4_request,SignedHeaders=date;host;' +
'x-amz-content-sha256;x-amz-date;x-amz-storage' +
'-class,Signature=98ad721746da40c64f1a55b78f14c2' +
'38d841ea1380cd77a1b5971af0ece108bd',
'x-amz-storage-class': 'REDUCED_REDUNDANCY',
'x-amz-content-sha256': '44ce7dd67c959e0d3524ffac1' +
'771dfbba87d2b6b4b4e99e42034a8b803f8b072',
},
},
query: {},
signedHeaders: 'date;host;x-amz-content-sha256;' +
'x-amz-date;x-amz-storage-class',
payloadChecksum: '44ce7dd67c959e0d3524ffac1771dfbba8' +
'7d2b6b4b4e99e42034a8b803f8b072',
credentialScope: '20130524/us-east-1/s3/aws4_request',
timestamp: '20130524T000000Z',
log,
};
const expectedOutput = 'AWS4-HMAC-SHA256\n' +
'20130524T000000Z\n' +
'20130524/us-east-1/s3/aws4_request\n' +
'9e0e90d9c76de8fa5b200d8c849cd5b8dc7a3' +
'be3951ddb7f6a76b4158342019d';
const actualOutput = constructStringToSign(params);
assert.strictEqual(actualOutput, expectedOutput);
});
// Example taken from: http://docs.aws.amazon.com/AmazonS3/
// latest/API/sig-v4-header-based-auth.html
it('should construct a stringToSign in accordance ' +
'with AWS rules for a pre-signed get url request (query auth)', () => {
const params = {
request: {
method: 'GET',
path: '/test.txt',
headers: {
host: 'examplebucket.s3.amazonaws.com',
},
},
query: {
'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
'X-Amz-Credential': 'AKIAIOSFODNN7EXAMPLE/20130524/' +
'us-east-1/s3/aws4_request',
'X-Amz-Date': '20130524T000000Z',
'X-Amz-Expires': '86400',
'X-Amz-SignedHeaders': 'host',
},
signedHeaders: 'host',
payloadChecksum: 'UNSIGNED-PAYLOAD',
credentialScope: '20130524/us-east-1/s3/aws4_request',
timestamp: '20130524T000000Z',
log,
};
const expectedOutput = 'AWS4-HMAC-SHA256\n' +
'20130524T000000Z\n' +
'20130524/us-east-1/s3/aws4_request\n' +
'3bfa292879f6447bbcda7001decf97f4a54d' +
'c650c8942174ae0a9121cf58ad04';
const actualOutput = constructStringToSign(params);
assert.strictEqual(actualOutput, expectedOutput);
});
});

View File

@ -111,4 +111,4 @@ class DummyRequestLogger {
}
module.exports = { makeid, timeDiff, makeAuthInfo,
createAlteredRequest, zpad, DummyRequestLogger };
createAlteredRequest, zpad, DummyRequestLogger };

View File

@ -11,7 +11,7 @@ describe('Matrix', () => {
password: 'nopassword',
delimiter: [undefined, '/', '', '|', 'abcd'],
prefix: [undefined, '/validPrefix/ThatIsNot/InTheSet',
'/validPrefix/ThatIsPresent/InTheTestSet', 'InvalidPrefix'],
'/validPrefix/ThatIsPresent/InTheTestSet', 'InvalidPrefix'],
};
/**
@ -58,7 +58,7 @@ describe('Matrix', () => {
++numberOfCallV2;
done();
}, 'should use v2 auth').testSpecialCase(ifParams, (testMatrix,
done) => {
done) => {
assert.equal(testMatrix.params.auth === 'v4', true);
++numberOfCallV4;
done();

View File

@ -1,78 +0,0 @@
'use strict'; // eslint-disable-line strict
const assert = require('assert');
const RedisClient = require('../../../lib/metrics/RedisClient');
const StatsClient = require('../../../lib/metrics/StatsClient');
// setup redis client
const config = {
host: '127.0.0.1',
port: 6379,
enableOfflineQueue: false,
};
const fakeLogger = {
trace: () => {},
error: () => {},
};
const redisClient = new RedisClient(config, fakeLogger);
// setup stats client
const STATS_INTERVAL = 5; // 5 seconds
const STATS_EXPIRY = 30; // 30 seconds
const statsClient = new StatsClient(redisClient, STATS_INTERVAL, STATS_EXPIRY);
describe('StatsClient class', () => {
const id = 'arsenal-test';
afterEach(() => redisClient.clear(() => {}));
it('should correctly record a new request', () => {
statsClient.reportNewRequest(id, (err, res) => {
assert.ifError(err);
assert(Array.isArray(res));
assert.equal(res.length, 2);
const expected = [[null, 1], [null, 1]];
assert.deepEqual(res, expected);
});
statsClient.reportNewRequest(id, (err, res) => {
assert.ifError(err);
assert(Array.isArray(res));
assert.equal(res.length, 2);
const expected = [[null, 2], [null, 1]];
assert.deepEqual(res, expected);
});
});
it('should correctly record a 500 on the server', () => {
statsClient.report500(id, (err, res) => {
assert.ifError(err);
assert(Array.isArray(res));
assert.equal(res.length, 2);
const expected = [[null, 1], [null, 1]];
assert.deepEqual(res, expected);
});
});
it('should respond back with total requests', () => {
statsClient.reportNewRequest(id, err => {
assert.ifError(err);
});
statsClient.report500(id, err => {
assert.ifError(err);
});
statsClient.getStats(fakeLogger, id, (err, res) => {
assert.ifError(err);
assert.equal(typeof res, 'object');
assert.equal(Object.keys(res).length, 3);
assert.equal(res.sampleDuration, STATS_EXPIRY);
const expected = { 'requests': 1, '500s': 1, 'sampleDuration': 30 };
assert.deepEqual(res, expected);
});
});
});
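For context, the `[[null, 1], [null, 1]]` replies asserted above have the shape of a Redis MULTI/EXEC result: one `[err, value]` pair for the INCR on the current interval key and one for its EXPIRE. A hedged usage sketch, assuming a local Redis and the same constructor and callback signatures the tests above use:

const RedisClient = require('../../../lib/metrics/RedisClient');
const StatsClient = require('../../../lib/metrics/StatsClient');

const logger = { trace: () => {}, error: () => {} };
const redis = new RedisClient({ host: '127.0.0.1', port: 6379 }, logger);
const stats = new StatsClient(redis, 5, 30); // 5 s intervals, 30 s expiry

// count one request, then read the aggregated numbers back
stats.reportNewRequest('my-service', err => {
    if (err) {
        return;
    }
    stats.getStats(logger, 'my-service', (err, res) => {
        // res ~ { 'requests': 1, '500s': 0, 'sampleDuration': 30 }
    });
});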

View File

@ -1,136 +0,0 @@
const assert = require('assert');
const ARN = require('../../../lib/models/ARN');
describe('ARN object model', () => {
describe('valid ARNs', () => {
[{ arn: 'arn:aws:iam::123456789012:role/backbeat',
service: 'iam',
accountId: '123456789012',
resource: 'role/backbeat',
isIAMAccount: false,
isIAMUser: false,
isIAMRole: true,
},
{ arn: 'arn:aws:iam::*:role/backbeat',
service: 'iam',
accountId: '*',
resource: 'role/backbeat',
isIAMAccount: false,
isIAMUser: false,
isIAMRole: true,
},
{ arn: 'arn:aws:iam:::role/backbeat',
service: 'iam',
accountId: null,
resource: 'role/backbeat',
isIAMAccount: false,
isIAMUser: false,
isIAMRole: false, // not a valid role without an account ID
},
{ arn: 'arn:aws:iam::123456789012:user/bart',
service: 'iam',
accountId: '123456789012',
resource: 'user/bart',
isIAMAccount: false,
isIAMUser: true,
isIAMRole: false,
},
{ arn: 'arn:aws:iam:::user/bart',
service: 'iam',
accountId: null,
resource: 'user/bart',
isIAMAccount: false,
isIAMUser: false, // not a valid user without an account ID
isIAMRole: false,
},
{ arn: 'arn:aws:iam::123456789012:root',
service: 'iam',
accountId: '123456789012',
resource: 'root',
isIAMAccount: true,
isIAMUser: false,
isIAMRole: false,
},
{ arn: 'arn:aws:iam:::root',
service: 'iam',
accountId: null,
resource: 'root',
isIAMAccount: false, // not a valid account without an account ID
isIAMUser: false,
isIAMRole: false,
},
{ arn: 'arn:aws:s3::123456789012:foo/bar/baz/qux',
service: 's3',
accountId: '123456789012',
resource: 'foo/bar/baz/qux',
isIAMAccount: false,
isIAMUser: false,
isIAMRole: false,
},
{ arn: 'arn:aws:s3::123456789012:foo:bar/baz/qux',
service: 's3',
accountId: '123456789012',
resource: 'foo:bar/baz/qux',
isIAMAccount: false,
isIAMUser: false,
isIAMRole: false,
},
{ arn: 'arn:aws:sts::123456789012:foobar',
service: 'sts',
accountId: '123456789012',
resource: 'foobar',
isIAMAccount: false,
isIAMUser: false,
isIAMRole: false,
},
{ arn: 'arn:aws:ring::123456789012:foobar',
service: 'ring',
accountId: '123456789012',
resource: 'foobar',
isIAMAccount: false,
isIAMUser: false,
isIAMRole: false,
},
{ arn: 'arn:scality:utapi::123456789012:foobar',
service: 'utapi',
accountId: '123456789012',
resource: 'foobar',
isIAMAccount: false,
isIAMUser: false,
isIAMRole: false,
},
{ arn: 'arn:scality:sso::123456789012:foobar',
service: 'sso',
accountId: '123456789012',
resource: 'foobar',
isIAMAccount: false,
isIAMUser: false,
isIAMRole: false,
},
].forEach(arnTest => it(`should accept ARN "${arnTest.arn}"`, () => {
const arnObj = ARN.createFromString(arnTest.arn);
assert(arnObj instanceof ARN);
assert.strictEqual(arnObj.getService(), arnTest.service);
assert.strictEqual(arnObj.getAccountId(), arnTest.accountId);
assert.strictEqual(arnObj.getResource(), arnTest.resource);
assert.strictEqual(arnObj.isIAMAccount(), arnTest.isIAMAccount);
assert.strictEqual(arnObj.isIAMUser(), arnTest.isIAMUser);
assert.strictEqual(arnObj.isIAMRole(), arnTest.isIAMRole);
assert.strictEqual(arnObj.toString(), arnTest.arn);
}));
});
describe('bad ARNs', () => {
['',
':',
'foo:',
'arn::iam::123456789012:role/backbeat',
'arn:aws:xxx::123456789012:role/backbeat',
'arn:aws:s3::123456789012345:role/backbeat',
'arn:aws:s3::12345678901b:role/backbeat',
].forEach(arn => it(`should fail with invalid ARN "${arn}"`, () => {
const res = ARN.createFromString(arn);
assert.notStrictEqual(res.error, undefined);
}));
});
});
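Taken together, the accepted and rejected strings above imply the parsing rule: colon-separated fields, a service drawn from a known set, and an account ID that is empty, "*", or at most twelve digits. A simplified re-implementation sketch of that validation (not Arsenal's actual code):

// Simplified sketch of the validation implied by the cases above.
function parseArn(str) {
    const [arn, partition, service, /* region */, accountId, ...rest] =
        str.split(':');
    const resource = rest.join(':'); // the resource may itself contain ':'
    if (arn !== 'arn' || !partition || rest.length === 0) {
        return { error: 'malformed ARN' };
    }
    if (!['iam', 'sts', 's3', 'ring', 'utapi', 'sso'].includes(service)) {
        return { error: 'unsupported service' };
    }
    if (accountId && accountId !== '*' && !/^\d{1,12}$/.test(accountId)) {
        return { error: 'invalid account ID' };
    }
    return { service, accountId: accountId || null, resource };
}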

View File

@ -1,6 +1,5 @@
const assert = require('assert');
const ObjectMD = require('../../../lib/models/ObjectMD');
const constants = require('../../../lib/constants');
describe('ObjectMD class setters/getters', () => {
let md = null;
@ -11,6 +10,7 @@ describe('ObjectMD class setters/getters', () => {
[
// In order: data property, value to set/get, default value
['ModelVersion', null, 3],
['OwnerDisplayName', null, ''],
['OwnerDisplayName', 'owner-display-name'],
['OwnerId', null, ''],
@ -79,8 +79,6 @@ describe('ObjectMD class setters/getters', () => {
destination: '',
storageClass: '',
role: '',
storageType: '',
dataStoreVersionId: '',
}],
['ReplicationInfo', {
status: 'PENDING',
@ -89,8 +87,6 @@ describe('ObjectMD class setters/getters', () => {
storageClass: 'STANDARD',
role: 'arn:aws:iam::account-id:role/src-resource,' +
'arn:aws:iam::account-id:role/dest-resource',
storageType: 'aws_s3',
dataStoreVersionId: 'QWY1QQwWn9xJcoz0EgJjJ_t8g4nMYsxo',
}],
['DataStoreName', null, ''],
].forEach(test => {
@ -114,91 +110,3 @@ describe('ObjectMD class setters/getters', () => {
});
});
});
describe('ObjectMD import from stored blob', () => {
it('should export and import correctly the latest model version', () => {
const md = new ObjectMD();
const jsonMd = md.getSerialized();
const importedRes = ObjectMD.createFromBlob(jsonMd);
assert.ifError(importedRes.error);
const importedMd = importedRes.result;
assert.deepStrictEqual(md, importedMd);
});
it('should convert old location to new location', () => {
const md = new ObjectMD();
const value = md.getValue();
value['md-model-version'] = 1;
value.location = 'stringLocation';
const jsonMd = JSON.stringify(value);
const importedRes = ObjectMD.createFromBlob(jsonMd);
assert.strictEqual(importedRes.error, undefined);
const importedMd = importedRes.result;
const valueImported = importedMd.getValue();
assert.strictEqual(valueImported['md-model-version'],
constants.mdModelVersion);
assert.deepStrictEqual(valueImported.location,
[{ key: 'stringLocation' }]);
});
it('should keep null location as is', () => {
const md = new ObjectMD();
const value = md.getValue();
value.location = null;
const jsonMd = JSON.stringify(value);
const importedRes = ObjectMD.createFromBlob(jsonMd);
assert.strictEqual(importedRes.error, undefined);
const importedMd = importedRes.result;
const valueImported = importedMd.getValue();
assert.deepStrictEqual(valueImported.location, null);
importedMd.setLocation([]);
assert.deepStrictEqual(importedMd.getValue().location, null);
});
it('should add dataStoreName attribute if missing', () => {
const md = new ObjectMD();
const value = md.getValue();
value['md-model-version'] = 2;
delete value.dataStoreName;
const jsonMd = JSON.stringify(value);
const importedRes = ObjectMD.createFromBlob(jsonMd);
assert.strictEqual(importedRes.error, undefined);
const importedMd = importedRes.result;
const valueImported = importedMd.getValue();
assert.strictEqual(valueImported['md-model-version'],
constants.mdModelVersion);
assert.notStrictEqual(valueImported.dataStoreName, undefined);
});
it('should return undefined for dataStoreVersionId if no object location',
() => {
const md = new ObjectMD();
const value = md.getValue();
const jsonMd = JSON.stringify(value);
const importedRes = ObjectMD.createFromBlob(jsonMd);
assert.strictEqual(importedRes.error, undefined);
const importedMd = importedRes.result;
assert.strictEqual(importedMd.getDataStoreVersionId(), undefined);
});
it('should get dataStoreVersionId if saved in object location', () => {
const md = new ObjectMD();
const dummyLocation = {
dataStoreVersionId: 'data-store-version-id',
};
md.setLocation([dummyLocation]);
const value = md.getValue();
const jsonMd = JSON.stringify(value);
const importedRes = ObjectMD.createFromBlob(jsonMd);
assert.strictEqual(importedRes.error, undefined);
const importedMd = importedRes.result;
assert.strictEqual(importedMd.getDataStoreVersionId(),
dummyLocation.dataStoreVersionId);
});
it('should return an error if blob is malformed JSON', () => {
const importedRes = ObjectMD.createFromBlob('{BAD JSON}');
assert.notStrictEqual(importedRes.error, undefined);
assert.strictEqual(importedRes.result, undefined);
});
});
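Taken together, the import tests above pin down the createFromBlob contract: it never throws, returning `{ error }` on malformed JSON and `{ result }` otherwise, with old model versions migrated on the way in. A short usage sketch of that round trip:

const ObjectMD = require('../../../lib/models/ObjectMD');

const md = new ObjectMD();
md.setLocation([{ key: 'loc1', dataStoreVersionId: 'v1' }]);

const blob = md.getSerialized();
const { error, result } = ObjectMD.createFromBlob(blob);
if (!error) {
    // the deserialized instance carries the same metadata
    console.log(result.getValue().location);
    console.log(result.getDataStoreVersionId()); // 'v1'
}
// malformed input is reported, not thrown:
console.log(ObjectMD.createFromBlob('{BAD JSON}').error !== undefined);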

View File

@ -9,17 +9,16 @@ describe('round robin hosts', () => {
caption: 'with { host, port } objects in list',
hostsList: [{ host: '1.2.3.0', port: 1000 },
{ host: '1.2.3.1', port: 1001 },
{ host: '1.2.3.2' }],
{ host: '1.2.3.2', port: 1002 }],
}, {
caption: 'with "host:port" strings in list',
hostsList: ['1.2.3.0:1000',
'1.2.3.1:1001',
'1.2.3.2'],
'1.2.3.1:1001',
'1.2.3.2'],
}].forEach(testCase => describe(testCase.caption, () => {
beforeEach(() => {
roundRobin = new RoundRobin(testCase.hostsList,
{ stickyCount: 10,
defaultPort: 1002 });
{ stickyCount: 10 });
});
it('should pick all hosts in turn', () => {
@ -32,7 +31,8 @@ describe('round robin hosts', () => {
// expect 3 loops of 10 times each of the 3 hosts
for (let i = 0; i < 90; ++i) {
const hostItem = roundRobin.pickHost();
hostsPickCount[hostItem.host] += 1;
hostsPickCount[hostItem.host] =
hostsPickCount[hostItem.host] + 1;
}
assert.strictEqual(hostsPickCount['1.2.3.0'], 30);
assert.strictEqual(hostsPickCount['1.2.3.1'], 30);
@ -51,7 +51,8 @@ describe('round robin hosts', () => {
const curHost = roundRobin.getCurrentHost();
for (let i = 0; i < 10; ++i) {
const hostItem = roundRobin.pickHost();
hostsPickCount[hostItem.host] += 1;
hostsPickCount[hostItem.host] =
hostsPickCount[hostItem.host] + 1;
}
assert.strictEqual(hostsPickCount[curHost.host], 10);
});
@ -66,7 +67,8 @@ describe('round robin hosts', () => {
// expect each host to be picked up 3 times
for (let i = 0; i < 9; ++i) {
const hostItem = roundRobin.pickNextHost();
hostsPickCount[hostItem.host] += 1;
hostsPickCount[hostItem.host] =
hostsPickCount[hostItem.host] + 1;
}
assert.strictEqual(hostsPickCount['1.2.3.0'], 3);
assert.strictEqual(hostsPickCount['1.2.3.1'], 3);
@ -99,18 +101,5 @@ describe('round robin hosts', () => {
// eslint-disable-next-line no-new
new RoundRobin(['zenko.io', 'zenka.ia']);
});
it('should have set default port if not in bootstrap list', () => {
// every host in the list should come back with its expected port
const portMap = {
'1.2.3.0': 1000,
'1.2.3.1': 1001,
'1.2.3.2': 1002,
};
for (let i = 0; i < 100; ++i) {
const hostItem = roundRobin.pickHost();
assert.strictEqual(hostItem.port, portMap[hostItem.host]);
}
});
}));
});
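As a usage sketch of the behaviour covered above (the require path is assumed; it is outside the hunk shown):

// Assumed path; the actual require is not shown in this diff.
const RoundRobin = require('../../../lib/network/RoundRobin');

const rr = new RoundRobin(['1.2.3.0:1000', '1.2.3.1:1001', '1.2.3.2'],
    { stickyCount: 10, defaultPort: 1002 });

// pickHost() repeats the current host `stickyCount` times before
// rotating; '1.2.3.2' comes back with the default port 1002.
for (let i = 0; i < 30; ++i) {
    const { host, port } = rr.pickHost();
    console.log(host, port);
}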

View File

@ -16,13 +16,13 @@ function checkParseRange(rangeHeader, totalLength, expectedRange) {
describe('parseRangeSpec function', () => {
[{ rangeHeader: 'bytes=1000-2000',
expectedRangeSpec: { start: 1000, end: 2000 } },
{ rangeHeader: 'bytes=1000-',
expectedRangeSpec: { start: 1000 } },
{ rangeHeader: 'bytes=-',
expectedRangeSpec: { error: errors.InvalidArgument } },
{ rangeHeader: 'bytes=10-9',
expectedRangeSpec: { error: errors.InvalidArgument } },
expectedRangeSpec: { start: 1000, end: 2000 } },
{ rangeHeader: 'bytes=1000-',
expectedRangeSpec: { start: 1000 } },
{ rangeHeader: 'bytes=-',
expectedRangeSpec: { error: errors.InvalidArgument } },
{ rangeHeader: 'bytes=10-9',
expectedRangeSpec: { error: errors.InvalidArgument } },
].forEach(testCase => {
const { rangeHeader, expectedRangeSpec } = testCase;
@ -45,25 +45,25 @@ describe('parseRangeSpec function', () => {
describe('getByteRangeFromSpec function', () => {
[{ rangeSpec: { start: 1000, end: 2000 }, objectSize: 3000,
expectedByteRange: { range: [1000, 2000] } },
{ rangeSpec: { start: 1000, end: 5000 }, objectSize: 3000,
expectedByteRange: { range: [1000, 2999] } },
{ rangeSpec: { start: 1000 }, objectSize: 3000,
expectedByteRange: { range: [1000, 2999] } },
{ rangeSpec: { suffix: 1000 }, objectSize: 3000,
expectedByteRange: { range: [2000, 2999] } },
{ rangeSpec: { suffix: 4000 }, objectSize: 3000,
expectedByteRange: { range: [0, 2999] } },
{ rangeSpec: { start: 2999 }, objectSize: 3000,
expectedByteRange: { range: [2999, 2999] } },
{ rangeSpec: { start: 3000 }, objectSize: 3000,
expectedByteRange: { error: errors.InvalidRange } },
{ rangeSpec: { start: 0, end: 10 }, objectSize: 0,
expectedByteRange: { error: errors.InvalidRange } },
{ rangeSpec: { suffix: 10 }, objectSize: 0,
expectedByteRange: { } },
{ rangeSpec: { suffix: 0 }, objectSize: 0,
expectedByteRange: { error: errors.InvalidRange } },
expectedByteRange: { range: [1000, 2000] } },
{ rangeSpec: { start: 1000, end: 5000 }, objectSize: 3000,
expectedByteRange: { range: [1000, 2999] } },
{ rangeSpec: { start: 1000 }, objectSize: 3000,
expectedByteRange: { range: [1000, 2999] } },
{ rangeSpec: { suffix: 1000 }, objectSize: 3000,
expectedByteRange: { range: [2000, 2999] } },
{ rangeSpec: { suffix: 4000 }, objectSize: 3000,
expectedByteRange: { range: [0, 2999] } },
{ rangeSpec: { start: 2999 }, objectSize: 3000,
expectedByteRange: { range: [2999, 2999] } },
{ rangeSpec: { start: 3000 }, objectSize: 3000,
expectedByteRange: { error: errors.InvalidRange } },
{ rangeSpec: { start: 0, end: 10 }, objectSize: 0,
expectedByteRange: { error: errors.InvalidRange } },
{ rangeSpec: { suffix: 10 }, objectSize: 0,
expectedByteRange: { } },
{ rangeSpec: { suffix: 0 }, objectSize: 0,
expectedByteRange: { error: errors.InvalidRange } },
].forEach(testCase => {
const { rangeSpec, objectSize, expectedByteRange } = testCase;
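The getByteRangeFromSpec cases above fully determine the clamping logic: end is capped at objectSize - 1, a bare start means "to the end", a suffix selects the last N bytes, and a start at or past the object size (or any range on an empty object, except a positive suffix) is InvalidRange. A condensed re-implementation sketch (not the library code):

// Condensed sketch of the clamping rules exercised above.
function getByteRange(spec, objectSize) {
    if (spec.suffix !== undefined) {
        if (spec.suffix === 0) {
            return { error: 'InvalidRange' };
        }
        if (objectSize === 0) {
            return {}; // whole (empty) object, no Content-Range
        }
        return { range: [Math.max(objectSize - spec.suffix, 0),
                         objectSize - 1] };
    }
    if (spec.start >= objectSize) {
        return { error: 'InvalidRange' };
    }
    const end = spec.end === undefined ?
        objectSize - 1 : Math.min(spec.end, objectSize - 1);
    return { range: [spec.start, end] };
}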

View File

@ -34,20 +34,20 @@ describe('REST interface for blob data storage', () => {
function setup(done) {
temp.mkdir('test-REST-data-dir', (err, tempDir) => {
dataStore = new DataFileStore({ dataPath: tempDir,
noSync: true,
logApi: clientLogApi,
});
noSync: true,
logApi: clientLogApi,
});
server = new RESTServer({ port: 6677,
dataStore,
log: { logLevel: 'info',
dumpLevel: 'error' },
});
dataStore,
log: { logLevel: 'info',
dumpLevel: 'error' },
});
server.setup(() => {
server.start();
client = new RESTClient({ host: 'localhost',
port: 6677,
logApi: clientLogApi,
});
port: 6677,
logApi: clientLogApi,
});
done();
});
});
@ -150,20 +150,20 @@ describe('REST interface for blob data storage', () => {
// successful range queries
[{ range: [10, 20],
sliceArgs: [10, 21], contentRange: [10, 20] },
{ range: [10, undefined],
sliceArgs: [10], contentRange: [10, contents.length - 1] },
{ range: [10, 1000],
sliceArgs: [10], contentRange: [10, contents.length - 1] },
{ range: [undefined, 10],
sliceArgs: [-10], contentRange: [contents.length - 10,
contents.length - 1] },
{ range: [undefined, contents.length + 2],
sliceArgs: [-(contents.length + 2)],
contentRange: [0, contents.length - 1] },
{ range: [contents.length - 1, undefined],
sliceArgs: [-1], contentRange: [contents.length - 1,
contents.length - 1] }]
sliceArgs: [10, 21], contentRange: [10, 20] },
{ range: [10, undefined],
sliceArgs: [10], contentRange: [10, contents.length - 1] },
{ range: [10, 1000],
sliceArgs: [10], contentRange: [10, contents.length - 1] },
{ range: [undefined, 10],
sliceArgs: [-10], contentRange: [contents.length - 10,
contents.length - 1] },
{ range: [undefined, contents.length + 2],
sliceArgs: [-(contents.length + 2)],
contentRange: [0, contents.length - 1] },
{ range: [contents.length - 1, undefined],
sliceArgs: [-1], contentRange: [contents.length - 1,
contents.length - 1] }]
.forEach((test, i) => {
const { range, sliceArgs, contentRange } = test;
it(`should get the correct range ${range[0]}-${range[1]}`,
@ -175,7 +175,7 @@ describe('REST interface for blob data storage', () => {
const value = resp.read();
assert.strictEqual(
value.toString(),
contents.slice(...sliceArgs));
contents.slice.apply(contents, sliceArgs));
checkContentRange(resp, contentRange[0],
contentRange[1]);
done();
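For readers mapping these cases back to HTTP, each `range` pair corresponds to one of the three Range header forms; a tiny illustrative helper (not part of the client):

// Illustrative: the Range header each [start, end] pair above denotes.
function toRangeHeader([start, end]) {
    if (start === undefined) {
        return `bytes=-${end}`;   // suffix form: last `end` bytes
    }
    if (end === undefined) {
        return `bytes=${start}-`; // open-ended: from `start` to the end
    }
    return `bytes=${start}-${end}`;
}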

View File

@ -16,21 +16,21 @@ const levelNet = require('../../../../lib/network/rpc/level-net');
// simply forward the API calls to the db as-is
const dbAsyncAPI = {
put: (env, ...args) => {
env.subDb.put(...args);
env.subDb.put.apply(env.subDb, args);
},
del: (env, ...args) => {
env.subDb.del(...args);
env.subDb.del.apply(env.subDb, args);
},
get: (env, ...args) => {
env.subDb.get(...args);
env.subDb.get.apply(env.subDb, args);
},
batch: (env, ...args) => {
env.subDb.batch(...args);
env.subDb.batch.apply(env.subDb, args);
},
};
const dbSyncAPI = {
createReadStream:
(env, ...args) => env.subDb.createReadStream(args),
(env, ...args) => env.subDb.createReadStream.apply(env.subDb, args),
};
describe('level-net - LevelDB over network', () => {

View File

@ -1,647 +0,0 @@
const assert = require('assert');
const Principal = require('../../../lib/policyEvaluator/principal');
const RequestContext = require('../../../lib/policyEvaluator/RequestContext');
const defaultAccountId = '123456789012';
const anotherAccountId = '098765432112';
const defaultAccountArn = `arn:aws:iam::${defaultAccountId}:root`;
const defaultUserArn = `arn:aws:iam::${defaultAccountId}:user/test`;
const defaultRole = `arn:aws:iam::${defaultAccountId}:role/role1`;
const defaultAssumedRole =
`arn:aws:sts::${defaultAccountId}:assumed-role/role1/session`;
const defaultSamlProvider =
`arn:aws:iam::${defaultAccountId}:saml-provider/provider1`;
const defaultFederatedUser =
`arn:aws:sts::${defaultAccountId}:federated-user/foo`;
const anotherAccountArn = `arn:aws:iam::${anotherAccountId}:root`;
const anotherUserArn = `arn:aws:iam::${anotherAccountId}:user/test`;
const defaultValids = {
AWS: [
defaultAccountId,
defaultAccountArn,
],
};
const defaultParams = {
log: {
trace: () => {},
debug: () => {},
info: () => {},
},
};
describe('Principal evaluator', () => {
[
{
name: 'anonymous as Principal (effect Allow) -> grant access',
statement: {
Principal: '*',
Effect: 'Allow',
},
valids: defaultValids,
result: 'Allow',
},
{
name: 'anonymous as Principal (effect Deny) -> deny access',
statement: {
Principal: '*',
Effect: 'Deny',
},
valids: defaultValids,
result: 'Deny',
},
{
name: 'account (arn) in Principal (effect Allow) -> grant access',
statement: {
Principal: {
AWS: defaultAccountArn,
},
Effect: 'Allow',
},
valids: defaultValids,
result: 'Allow',
},
{
name: 'account (arn) in Principal (effect Deny) -> deny access',
statement: {
Principal: {
AWS: [defaultAccountArn],
},
Effect: 'Deny',
},
valids: defaultValids,
result: 'Deny',
},
{
name: 'account (id) in Principal (effect Allow) -> grant access',
statement: {
Principal: {
AWS: defaultAccountId,
},
Effect: 'Allow',
},
valids: defaultValids,
result: 'Allow',
},
{
name: 'account (id) as Principal (effect Deny) -> deny access',
statement: {
Principal: {
AWS: defaultAccountId,
},
Effect: 'Deny',
},
valids: defaultValids,
result: 'Deny',
},
{
name: 'account not in Principal (effect Allow) -> neutral',
statement: {
Principal: {
AWS: [anotherAccountId],
},
Effect: 'Allow',
},
valids: defaultValids,
result: 'Neutral',
},
{
name: 'account not in Principal (effect Deny) -> neutral',
statement: {
Principal: {
AWS: [anotherAccountId],
},
Effect: 'Deny',
},
valids: defaultValids,
result: 'Neutral',
},
{
name:
'multiple accounts as Principal (effect Allow) -> grant access',
statement: {
Principal: {
AWS: [anotherAccountId, defaultAccountId],
},
Effect: 'Allow',
},
valids: defaultValids,
result: 'Allow',
},
{
name: 'anonymous as NotPrincipal (effect Allow) -> neutral',
statement: {
NotPrincipal: '*',
Effect: 'Allow',
},
valids: defaultValids,
result: 'Neutral',
},
{
name: 'anonymous as NotPrincipal (effect Deny) -> neutral',
statement: {
NotPrincipal: '*',
Effect: 'Deny',
},
valids: defaultValids,
result: 'Neutral',
},
{
name: 'account (arn) in NotPrincipal (effect Allow) -> neutral',
statement: {
NotPrincipal: {
AWS: defaultAccountArn,
},
Effect: 'Allow',
},
valids: defaultValids,
result: 'Neutral',
},
{
name: 'account (arn) in NotPrincipal (effect Deny) -> neutral',
statement: {
NotPrincipal: {
AWS: [anotherAccountArn, defaultAccountArn],
},
Effect: 'Allow',
},
valids: defaultValids,
result: 'Neutral',
},
{
name: 'account (arn) not in NotPrincipal (effect Allow) -> ' +
'grant access',
statement: {
NotPrincipal: {
AWS: anotherAccountArn,
},
Effect: 'Allow',
},
valids: defaultValids,
result: 'Allow',
},
{
name: 'account (arn) not in NotPrincipal (effect Deny) -> ' +
'deny access',
statement: {
NotPrincipal: {
AWS: anotherAccountArn,
},
Effect: 'Deny',
},
valids: defaultValids,
result: 'Deny',
},
{
name: 'Entities other than AWS in Principal (effect Allow) -> ' +
'neutral',
statement: {
Principal: {
Service: 'backbeat',
},
Effect: 'Allow',
},
valids: defaultValids,
result: 'Neutral',
},
{
name: 'Entities other than AWS in Principal (effect Deny) -> ' +
'neutral',
statement: {
Principal: {
Service: 'backbeat',
},
Effect: 'Deny',
},
valids: defaultValids,
result: 'Neutral',
},
{
name: 'Service in Principal (effect Allow) -> grant access',
statement: {
Principal: {
Service: 'backbeat',
},
Effect: 'Allow',
},
valids: {
Service: 'backbeat',
},
result: 'Allow',
},
{
name: 'User as principal (effect Allow) -> grant access',
statement: {
Principal: {
AWS: `arn:aws:iam::${defaultAccountId}:user/test`,
},
Effect: 'Allow',
},
valids: {
AWS: `arn:aws:iam::${defaultAccountId}:user/test`,
},
result: 'Allow',
},
{
name: 'User not in Principal (effect Allow) -> neutral',
statement: {
Principal: {
AWS: `arn:aws:iam::${defaultAccountId}:user/test`,
},
Effect: 'Allow',
},
valids: {
AWS: `arn:aws:iam::${defaultAccountId}:user/another/testUser`,
},
result: 'Neutral',
},
{
name: 'Role in Principal (effect Allow) -> grant access',
statement: {
Principal: {
AWS: `arn:aws:iam::${defaultAccountId}:role/role1`,
},
Effect: 'Allow',
},
valids: {
AWS: [
`arn:aws:iam::${defaultAccountId}:role/role1`,
`arn:aws:iam::${defaultAccountId}:assumed-role` +
'/role1/session',
],
},
result: 'Allow',
},
{
name: 'Role in Principal (effect Deny) -> deny access',
statement: {
Principal: {
AWS: `arn:aws:iam::${defaultAccountId}:role/role1`,
},
Effect: 'Deny',
},
valids: {
AWS: [
`arn:aws:iam::${defaultAccountId}:role/role1`,
`arn:aws:iam::${defaultAccountId}:assumed-role` +
'/role1/session',
],
},
result: 'Deny',
},
].forEach(test => {
it(`_evaluatePrincipalField(): ${test.name}`, () => {
assert.strictEqual(Principal._evaluatePrincipalField(defaultParams,
test.statement, test.valids), test.result);
});
});
[
{
name: 'should allow with a neutral',
statement: [
{
Principal: {
AWS: anotherAccountArn,
},
Effect: 'Deny',
},
{
Principal: {
AWS: defaultAccountArn,
},
Effect: 'Allow',
},
],
valids: defaultValids,
result: 'Allow',
},
{
name: 'should deny even with an allow',
statement: [
{
Principal: {
AWS: defaultAccountArn,
},
Effect: 'Allow',
},
{
Principal: {
AWS: defaultAccountArn,
},
Effect: 'Deny',
},
],
valids: defaultValids,
result: 'Deny',
},
{
name: 'should deny if no matches',
statement: [
{
Principal: {
AWS: anotherAccountArn,
},
Effect: 'Allow',
},
],
valids: defaultValids,
result: 'Deny',
},
].forEach(test => {
it(`_evaluatePrincipal(): ${test.name}`, () => {
const params = {
log: defaultParams.log,
trustedPolicy: {
Statement: test.statement,
},
};
const valids = test.valids;
assert.strictEqual(Principal._evaluatePrincipal(params, valids),
test.result);
});
});
[
{
name: 'should check user inside the same account',
statement: [
{
Principal: {
AWS: defaultUserArn,
},
Effect: 'Allow',
},
],
requester: {
accountId: defaultAccountId,
arn: defaultUserArn,
parentArn: null,
userType: 'User',
},
target: {
accountId: defaultAccountId,
},
result: {
result: 'Allow',
checkAction: false,
},
},
{
name: 'should deny user inside the same account',
statement: [
{
Principal: {
AWS: defaultUserArn,
},
Effect: 'Allow',
},
],
requester: {
accountId: defaultAccountId,
arn: `arn:aws:iam::${defaultAccountId}:user/anotherUser`,
parentArn: null,
userType: 'User',
},
target: {
accountId: defaultAccountId,
},
result: {
result: 'Deny',
checkAction: false,
},
},
{
name: 'should deny principal if account is deny',
statement: [
{
Principal: {
AWS: defaultAccountId,
},
Effect: 'Deny',
},
{
Principal: {
AWS: defaultUserArn,
},
Effect: 'Allow',
},
],
requester: {
accountId: defaultAccountId,
arn: defaultUserArn,
parentArn: null,
userType: 'User',
},
target: {
accountId: defaultAccountId,
},
result: {
result: 'Deny',
checkAction: false,
},
},
{
name: 'should deny assumed role if role is deny',
statement: [
{
Principal: {
AWS: defaultRole,
},
Effect: 'Deny',
},
{
Principal: {
AWS: defaultAssumedRole,
},
Effect: 'Allow',
},
],
requester: {
accountId: defaultAccountId,
arn: defaultAssumedRole,
parentArn: defaultRole,
userType: 'AssumedRole',
},
target: {
accountId: defaultAccountId,
},
result: {
result: 'Deny',
checkAction: false,
},
},
{
name: 'should deny user as principal if account is different',
statement: [
{
Principal: {
AWS: anotherUserArn,
},
Effect: 'Allow',
},
],
requester: {
accountId: anotherAccountId,
arn: anotherUserArn,
parentArn: null,
userType: 'User',
},
target: {
accountId: defaultAccountId,
},
result: {
result: 'Deny',
checkAction: true,
},
},
{
name: 'should allow user if account is in principal',
statement: [
{
Principal: {
AWS: anotherAccountArn,
},
Effect: 'Allow',
},
],
requester: {
accountId: anotherAccountId,
arn: anotherUserArn,
parentArn: null,
userType: 'User',
},
target: {
accountId: defaultAccountId,
},
result: {
result: 'Allow',
checkAction: true,
},
},
{
name: 'should allow service as principal',
statement: [
{
Principal: {
Service: 'backbeat',
},
Effect: 'Allow',
},
],
requester: {
accountId: defaultAccountId,
arn: 'backbeat',
parentArn: null,
userType: 'Service',
},
target: {
accountId: defaultAccountId,
},
result: {
result: 'Allow',
checkAction: false,
},
},
{
name: 'should allow federated provider',
statement: [
{
Principal: {
Federated: defaultSamlProvider,
},
Effect: 'Allow',
},
],
requester: {
accountId: defaultAccountId,
arn: defaultFederatedUser,
parentArn: defaultSamlProvider,
userType: 'Federated',
},
target: {
accountId: defaultAccountId,
},
result: {
result: 'Allow',
checkAction: false,
},
},
{
name: 'should not allow when external id not matching',
statement: [
{
Principal: {
AWS: anotherAccountId,
},
Effect: 'Allow',
Condition: {
StringEquals: { 'sts:ExternalId': '12345' },
},
},
],
requester: {
accountId: anotherAccountId,
arn: anotherUserArn,
parentArn: null,
userType: 'User',
},
target: {
accountId: defaultAccountId,
},
result: {
result: 'Deny',
checkAction: true,
},
},
{
name: 'should allow when external id matching',
statement: [
{
Principal: {
AWS: anotherAccountId,
},
Effect: 'Allow',
Condition: {
StringEquals: { 'sts:ExternalId': '4321' },
},
},
],
requester: {
accountId: anotherAccountId,
arn: anotherUserArn,
parentArn: null,
userType: 'User',
},
target: {
accountId: defaultAccountId,
},
result: {
result: 'Allow',
checkAction: true,
},
},
].forEach(test => {
it(`evaluatePrincipal(): ${test.name}`, () => {
const rc = new RequestContext({}, {}, '', '', '127.0.0.1',
false, 'assumeRole', 'sts', null, {
accountid: test.requester.accountId,
arn: test.requester.arn,
parentArn: test.requester.parentArn,
principalType: test.requester.userType,
externalId: '4321',
}, 'v4', 'V4');
const params = {
log: defaultParams.log,
trustedPolicy: {
Statement: test.statement,
},
rc,
targetAccountId: test.target.accountId,
};
const result = Principal.evaluatePrincipal(params);
assert.deepStrictEqual(result, test.result);
});
});
});
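The _evaluatePrincipal cases above follow the usual IAM combining rule, which can be stated in a few lines (a sketch of the rule, not the module's code):

// Any matching Deny wins; otherwise a matching Allow grants; with no
// match at all, the default is Deny.
function combineStatementResults(results) {
    if (results.includes('Deny')) {
        return 'Deny';
    }
    return results.includes('Allow') ? 'Allow' : 'Deny';
}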

View File

@ -252,10 +252,10 @@ describe('policyEvaluator', () => {
() => {
policy.Statement.Action = 's3:ListBucket';
policy.Statement.Condition = { StringEquals:
{ 's3:prefix': [
'home/${aws:username}/*${?}${*}${$}${}?',
'home/',
] } };
{ 's3:prefix': [
'home/${aws:username}/*${?}${*}${$}${}?',
'home/',
] } };
const rcModifiers = {
_query: {
prefix: 'home/Roger/*?*$${}?',
@ -385,7 +385,7 @@ describe('policyEvaluator', () => {
() => {
policy.Statement.Resource = 'arn:aws:s3:::bucket/*';
policy.Statement.Condition = { StringNotEquals:
{ 's3:x-amz-acl':
{ 's3:x-amz-acl':
['public-read', 'public-read-write'] } };
const rcModifiers = {
_generalResource: 'bucket',
@ -402,7 +402,7 @@ describe('policyEvaluator', () => {
() => {
policy.Statement.Resource = 'arn:aws:s3:::bucket/*';
policy.Statement.Condition = { StringNotEquals:
{ 's3:x-amz-acl':
{ 's3:x-amz-acl':
['public-read', 'public-read-write'] } };
const rcModifiers = {
_generalResource: 'bucket',
@ -419,7 +419,7 @@ describe('policyEvaluator', () => {
() => {
policy.Statement.Resource = 'arn:aws:s3:::bucket/*';
policy.Statement.Condition = { StringNotEquals:
{ 's3:x-amz-acl':
{ 's3:x-amz-acl':
['public-read', 'public-read-write'] } };
const rcModifiers = {
_generalResource: 'bucket',
@ -432,7 +432,7 @@ describe('policyEvaluator', () => {
'if do not meet condition',
() => {
policy.Statement.Condition = { StringEqualsIgnoreCase:
{ 'aws:UserAgent':
{ 'aws:UserAgent':
['CyberSquaw', 's3Sergeant', 'jetSetter'] } };
// Not one of the options
const rcModifiers = {
@ -447,7 +447,7 @@ describe('policyEvaluator', () => {
'if meet condition',
() => {
policy.Statement.Condition = { StringEqualsIgnoreCase:
{ 'aws:UserAgent':
{ 'aws:UserAgent':
['CyberSquaw', 's3Sergeant', 'jetSetter'] } };
const rcModifiers = {
_headers: {
@ -461,7 +461,7 @@ describe('policyEvaluator', () => {
'if do not meet condition',
() => {
policy.Statement.Condition = { StringNotEqualsIgnoreCase:
{ 'aws:UserAgent':
{ 'aws:UserAgent':
['CyberSquaw', 's3Sergeant', 'jetSetter'] } };
const rcModifiers = {
_headers: {
@ -475,7 +475,7 @@ describe('policyEvaluator', () => {
'if meet condition',
() => {
policy.Statement.Condition = { StringNotEqualsIgnoreCase:
{ 'aws:UserAgent':
{ 'aws:UserAgent':
['CyberSquaw', 's3Sergeant', 'jetSetter'] } };
const rcModifiers = {
_headers: {
@ -488,7 +488,7 @@ describe('policyEvaluator', () => {
'if condition parameter is completely missing from request',
() => {
policy.Statement.Condition = { StringNotEqualsIgnoreCase:
{ 'aws:UserAgent':
{ 'aws:UserAgent':
['CyberSquaw', 's3Sergeant', 'jetSetter'] } };
const rcModifiers = {};
check(requestContext, rcModifiers, policy, 'Allow');
@ -621,7 +621,7 @@ describe('policyEvaluator', () => {
'if do not meet condition',
() => {
policy.Statement.Condition = { DateEquals:
{ 'aws:TokenIssueTime':
{ 'aws:TokenIssueTime':
'2016-06-30T19:42:23.531Z' } };
let rcModifiers =
{ _tokenIssueTime: '2016-06-30T19:42:23.431Z' };
@ -635,7 +635,7 @@ describe('policyEvaluator', () => {
'if do not meet condition',
() => {
policy.Statement.Condition =
{ 'aws:EpochTime':
{ 'aws:EpochTime':
'1467315743531' };
const rcModifiers =
{ _tokenIssueTime: '1467315743431' };
@ -646,7 +646,7 @@ describe('policyEvaluator', () => {
'if meet condition',
() => {
policy.Statement.Condition = { DateEquals:
{ 'aws:TokenIssueTime':
{ 'aws:TokenIssueTime':
'2016-06-30T19:42:23.431Z' } };
let rcModifiers =
{ _tokenIssueTime: '2016-06-30T19:42:23.431Z' };
@ -661,7 +661,7 @@ describe('policyEvaluator', () => {
() => {
const clock = lolex.install(1467315743431);
policy.Statement.Condition = { DateEquals:
{ 'aws:EpochTime':
{ 'aws:EpochTime':
'1467315743431' } };
check(requestContext, {}, policy, 'Allow');
clock.uninstall();
@ -671,7 +671,7 @@ describe('policyEvaluator', () => {
'if do not meet condition',
() => {
policy.Statement.Condition = { DateNotEquals:
{ 'aws:TokenIssueTime':
{ 'aws:TokenIssueTime':
'2016-06-30T19:42:23.431Z' } };
let rcModifiers =
{ _tokenIssueTime: '2016-06-30T19:42:23.431Z' };
@ -686,7 +686,7 @@ describe('policyEvaluator', () => {
() => {
const clock = lolex.install(1467315743431);
policy.Statement.Condition = { DateNotEquals:
{ 'aws:EpochTime':
{ 'aws:EpochTime':
'1467315743431' } };
check(requestContext, {}, policy, 'Neutral');
clock.uninstall();
@ -696,7 +696,7 @@ describe('policyEvaluator', () => {
'if meet condition',
() => {
policy.Statement.Condition = { DateNotEquals:
{ 'aws:TokenIssueTime':
{ 'aws:TokenIssueTime':
'2016-06-30T19:42:23.531Z' } };
let rcModifiers =
{ _tokenIssueTime: '2016-06-30T19:42:23.431Z' };
@ -710,7 +710,7 @@ describe('policyEvaluator', () => {
'time if meet condition',
() => {
policy.Statement.Condition = { DateNotEquals:
{ 'aws:EpochTime':
{ 'aws:EpochTime':
'1467315743531' } };
check(requestContext, {}, policy, 'Allow');
});
@ -719,7 +719,7 @@ describe('policyEvaluator', () => {
'condition with ISO time if do not meet condition',
() => {
policy.Statement.Condition = { DateLessThan:
{ 'aws:TokenIssueTime':
{ 'aws:TokenIssueTime':
'2016-06-30T19:42:23.431Z' } };
let rcModifiers =
{ _tokenIssueTime: '2016-06-30T19:42:23.531Z' };
@ -733,7 +733,7 @@ describe('policyEvaluator', () => {
'with ISO time if do not meet condition',
() => {
policy.Statement.Condition = { DateLessThan:
{ 'aws:CurrentTime':
{ 'aws:CurrentTime':
'2016-06-30T19:42:23.431Z' } };
check(requestContext, {}, policy, 'Neutral');
});
@ -742,7 +742,7 @@ describe('policyEvaluator', () => {
'with epoch time if do not meet condition',
() => {
policy.Statement.Condition = { DateLessThan:
{ 'aws:EpochTime':
{ 'aws:EpochTime':
'1467315743431' } };
check(requestContext, {}, policy, 'Neutral');
});
@ -751,8 +751,8 @@ describe('policyEvaluator', () => {
'condition if meet condition',
() => {
policy.Statement.Condition = { DateLessThan:
{ 'aws:TokenIssueTime':
['2016-06-30T19:42:23.431Z', '2017-06-30T19:42:23.431Z',
{ 'aws:TokenIssueTime':
['2016-06-30T19:42:23.431Z', '2017-06-30T19:42:23.431Z',
'2018-06-30T19:42:23.431Z'] },
};
const rcModifiers =
@ -764,7 +764,7 @@ describe('policyEvaluator', () => {
'condition if meet condition',
() => {
policy.Statement.Condition = { DateLessThan:
{ 'aws:CurrentTime':
{ 'aws:CurrentTime':
'2099-06-30T19:42:23.431Z' } };
check(requestContext, {}, policy, 'Allow');
const rcModifiers = { _tokenIssueTime: '1467315743331' };
@ -775,7 +775,7 @@ describe('policyEvaluator', () => {
'condition if meet condition',
() => {
policy.Statement.Condition = { DateLessThan:
{ 'aws:EpochTime':
{ 'aws:EpochTime':
'4086531743431' } };
check(requestContext, {}, policy, 'Allow');
});
@ -784,7 +784,7 @@ describe('policyEvaluator', () => {
'with ISO time if do not meet condition',
() => {
policy.Statement.Condition = { DateLessThanEquals:
{ 'aws:TokenIssueTime':
{ 'aws:TokenIssueTime':
'2016-06-30T19:42:23.431Z' } };
const rcModifiers =
{ _tokenIssueTime: '2016-06-30T19:42:23.531Z' };
@ -795,7 +795,7 @@ describe('policyEvaluator', () => {
'condition with ISO time if do not meet condition',
() => {
policy.Statement.Condition = { DateLessThanEquals:
{ 'aws:CurrentTime':
{ 'aws:CurrentTime':
'2016-06-30T19:42:23.431Z' } };
check(requestContext, {}, policy, 'Neutral');
});
@ -804,7 +804,7 @@ describe('policyEvaluator', () => {
'with ISO time if meet condition',
() => {
policy.Statement.Condition = { DateLessThanEquals:
{ 'aws:TokenIssueTime':
{ 'aws:TokenIssueTime':
'2016-06-30T19:42:23.431Z' } };
const rcModifiers =
{ _tokenIssueTime: '2016-06-30T19:42:23.431Z' };
@ -815,7 +815,7 @@ describe('policyEvaluator', () => {
'condition with ISO time if meet condition',
() => {
policy.Statement.Condition = { DateLessThanEquals:
{ 'aws:CurrentTime':
{ 'aws:CurrentTime':
'2099-06-30T19:42:23.431Z' } };
check(requestContext, {}, policy, 'Allow');
});
@ -824,7 +824,7 @@ describe('policyEvaluator', () => {
'with ISO time if do not meet condition',
() => {
policy.Statement.Condition = { DateGreaterThan:
{ 'aws:TokenIssueTime':
{ 'aws:TokenIssueTime':
'2016-06-30T19:42:23.431Z' } };
const rcModifiers =
{ _tokenIssueTime: '2016-06-30T19:42:23.331Z' };
@ -835,7 +835,7 @@ describe('policyEvaluator', () => {
'condition with ISO time if do not meet condition',
() => {
policy.Statement.Condition = { DateGreaterThan:
{ 'aws:CurrentTime':
{ 'aws:CurrentTime':
'2099-06-30T19:42:23.431Z' } };
check(requestContext, {}, policy, 'Neutral');
});
@ -844,7 +844,7 @@ describe('policyEvaluator', () => {
'with ISO time if meet condition',
() => {
policy.Statement.Condition = { DateGreaterThan:
{ 'aws:TokenIssueTime':
{ 'aws:TokenIssueTime':
'2016-06-30T19:42:23.431Z' } };
const rcModifiers =
{ _tokenIssueTime: '2016-06-30T19:42:23.531Z' };
@ -855,7 +855,7 @@ describe('policyEvaluator', () => {
'condition with ISO time if meet condition',
() => {
policy.Statement.Condition = { DateGreaterThan:
{ 'aws:CurrentTime':
{ 'aws:CurrentTime':
'2016-06-30T19:42:23.431Z' } };
check(requestContext, {}, policy, 'Allow');
});
@ -864,7 +864,7 @@ describe('policyEvaluator', () => {
'with ISO time if do not meet condition',
() => {
policy.Statement.Condition = { DateGreaterThanEquals:
{ 'aws:TokenIssueTime':
{ 'aws:TokenIssueTime':
'2016-06-30T19:42:23.431Z' } };
const rcModifiers =
{ _tokenIssueTime: '2016-06-30T19:42:23.331Z' };
@ -875,7 +875,7 @@ describe('policyEvaluator', () => {
'condition with ISO time if do not meet condition',
() => {
policy.Statement.Condition = { DateGreaterThanEquals:
{ 'aws:CurrentTime':
{ 'aws:CurrentTime':
'2099-06-30T19:42:23.431Z' } };
check(requestContext, {}, policy, 'Neutral');
});
@ -884,7 +884,7 @@ describe('policyEvaluator', () => {
'condition with ISO time if meet condition',
() => {
policy.Statement.Condition = { DateGreaterThanEquals:
{ 'aws:TokenIssueTime':
{ 'aws:TokenIssueTime':
'2016-06-30T19:42:23.431Z' } };
const rcModifiers =
{ _tokenIssueTime: '2016-06-30T19:42:23.431Z' };
@ -895,7 +895,7 @@ describe('policyEvaluator', () => {
'time condition with ISO time if meet condition',
() => {
policy.Statement.Condition = { DateGreaterThanEquals:
{ 'aws:CurrentTime':
{ 'aws:CurrentTime':
'2016-06-30T19:42:23.431Z' } };
check(requestContext, {}, policy, 'Allow');
});
@ -1078,36 +1078,6 @@ describe('policyEvaluator', () => {
check(requestContext, {}, policy, 'Allow');
});
it('should allow policy arn if meet condition',
() => {
policy.Statement.Condition = {
ArnLike: { 'iam:PolicyArn':
['arn:aws:iam::012345678901:policy/dev/*'] },
};
requestContext.setRequesterInfo(
{ accountid: '012345678901' });
const rcModifiers = {
_policyArn:
'arn:aws:iam::012345678901:policy/dev/devMachine1',
};
check(requestContext, rcModifiers, policy, 'Allow');
});
it('should not allow policy arn if do not meet condition',
() => {
policy.Statement.Condition = {
ArnLike: { 'iam:PolicyArn':
['arn:aws:iam::012345678901:policy/dev/*'] },
};
requestContext.setRequesterInfo(
{ accountid: '012345678901' });
const rcModifiers = {
_policyArn:
'arn:aws:iam::012345678901:policy/admin/deleteUser',
};
check(requestContext, rcModifiers, policy, 'Neutral');
});
it('should allow access with multiple operator conditions ' +
'and multiple conditions under an operator',
() => {
@ -1172,7 +1142,7 @@ describe('policyEvaluator', () => {
requestContext.setRequesterInfo({});
const result = evaluateAllPolicies(requestContext,
[samples['arn:aws:iam::aws:policy/AmazonS3FullAccess'],
samples['Deny Bucket Policy']], log);
samples['Deny Bucket Policy']], log);
assert.strictEqual(result, 'Deny');
});
@ -1183,7 +1153,7 @@ describe('policyEvaluator', () => {
requestContext.setRequesterInfo({});
const result = evaluateAllPolicies(requestContext,
[samples['Multi-Statement Policy'],
samples['Variable Bucket Policy']], log);
samples['Variable Bucket Policy']], log);
assert.strictEqual(result, 'Deny');
});
@ -1195,7 +1165,7 @@ describe('policyEvaluator', () => {
requestContext.setRequesterInfo({});
const result = evaluateAllPolicies(requestContext,
[samples['Multi-Statement Policy'],
samples['Variable Bucket Policy']], log);
samples['Variable Bucket Policy']], log);
assert.strictEqual(result, 'Deny');
});
});
@ -1262,13 +1232,11 @@ describe('handleWildcards', () => {
assert.deepStrictEqual(result, '^abc\\*abc\\?abc\\$$');
});
/* eslint-disable no-useless-escape */
it('should escape other regular expression special characters', () => {
const result = handleWildcards('*^.+?()|[\]{}');
assert.deepStrictEqual(result,
'^.*?\\^\\.\\+.{1}\\(\\)\\|\\[\\\]\\{\\}$');
});
/* eslint-enable */
});
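The two assertions above fully determine the translation for plain strings: `*` becomes the non-greedy `.*?`, `?` becomes `.{1}`, every other RegExp metacharacter is escaped, and the result is anchored. A character-by-character sketch (ignoring the `${*}`/`${?}`/`${$}` literal escapes, which are handled separately):

// Sketch only: plain wildcard-to-RegExp translation, without the
// ${*}-style literal escapes the module also supports.
function wildcardsToRegExpSource(value) {
    let out = '^';
    for (const ch of value) {
        if (ch === '*') {
            out += '.*?';
        } else if (ch === '?') {
            out += '.{1}';
        } else if ('^.+()|[]{}$\\'.includes(ch)) {
            out += `\\${ch}`;
        } else {
            out += ch;
        }
    }
    return `${out}$`;
}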
describe('substituteVariables', () => {

View File

@ -1,73 +0,0 @@
const assert = require('assert');
const azureMpuUtils =
require('../../../../lib/s3middleware/azureHelpers/mpuUtils');
const padString = azureMpuUtils.padString;
const getSubPartInfo = azureMpuUtils.getSubPartInfo;
const padStringTests = [
{
category: 'partNumber',
strings: [1, 10, 100, 10000],
expectedResults: ['00001', '00010', '00100', '10000'],
}, {
category: 'subPart',
strings: [1, 50],
expectedResults: ['01', '50'],
}, {
category: 'part',
strings: ['test|'],
expectedResults:
['test|%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%'],
},
];
const oneMb = 1024 * 1024;
const oneHundredMb = oneMb * 100;
const subPartInfoTests = [
{
desc: '100 mb',
size: oneHundredMb,
expectedNumberSubParts: 1,
expectedLastPartSize: oneHundredMb,
}, {
desc: '101 mb',
size: oneHundredMb + oneMb,
expectedNumberSubParts: 2,
expectedLastPartSize: oneMb,
}, {
desc: '599 mb',
size: 6 * oneHundredMb - oneMb,
expectedNumberSubParts: 6,
expectedLastPartSize: oneHundredMb - oneMb,
}, {
desc: '600 mb',
size: 6 * oneHundredMb,
expectedNumberSubParts: 6,
expectedLastPartSize: oneHundredMb,
},
];
describe('s3middleware Azure MPU helper utility function', () => {
padStringTests.forEach(test => {
it(`padString should pad a ${test.category}`, done => {
const result = test.strings.map(str =>
padString(str, test.category));
assert.deepStrictEqual(result, test.expectedResults);
done();
});
});
subPartInfoTests.forEach(test => {
const { desc, size, expectedNumberSubParts, expectedLastPartSize }
= test;
it('getSubPartInfo should return correct result for ' +
`dataContentLength of ${desc}`, done => {
const result = getSubPartInfo(size);
const expectedLastPartIndex = expectedNumberSubParts - 1;
assert.strictEqual(result.lastPartIndex, expectedLastPartIndex);
assert.strictEqual(result.lastPartSize, expectedLastPartSize);
done();
});
});
});
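The getSubPartInfo expectations above reduce to simple arithmetic over a 100 MB maximum sub-part size; a sketch of that calculation:

// Sketch of the arithmetic behind the expectations above: content is
// cut into sub-parts of at most 100 MB each.
const MAX_SUBPART_SIZE = 100 * 1024 * 1024;

function subPartInfo(dataContentLength) {
    const count = Math.ceil(dataContentLength / MAX_SUBPART_SIZE);
    return {
        lastPartIndex: count - 1,
        lastPartSize: dataContentLength - (count - 1) * MAX_SUBPART_SIZE,
    };
}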

View File

@ -1,24 +0,0 @@
const assert = require('assert');
const crypto = require('crypto');
const objectUtils =
require('../../../lib/s3middleware/objectUtils');
const hexHash = 'd41d8cd98f00b204e9800998ecf8427e';
const base64Hash = '1B2M2Y8AsgTpgAmY7PhCfg==';
describe('s3middleware object utilities', () => {
it('should convert hexadecimal MD5 to base 64', done => {
const hash = crypto.createHash('md5').digest('hex');
const convertedHash = objectUtils.getBase64MD5(hash);
assert.strictEqual(convertedHash, base64Hash);
done();
});
it('should convert base 64 MD5 to hexadecimal', done => {
const hash = crypto.createHash('md5').digest('base64');
const convertedHash = objectUtils.getHexMD5(hash);
assert.strictEqual(convertedHash, hexHash);
done();
});
});
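Both conversions are plain re-encodings of the same 16-byte digest; one way to write them (a sketch, not necessarily the module's implementation):

// Re-encode the same 16-byte MD5 digest between base 16 and base 64.
const getBase64MD5 = hexMD5 =>
    Buffer.from(hexMD5, 'hex').toString('base64');
const getHexMD5 = base64MD5 =>
    Buffer.from(base64MD5, 'base64').toString('hex');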

View File

@ -77,7 +77,7 @@ describe('raft record log client', () => {
function setup(done) {
bucketClient = new BucketClientMock();
logClient = new LogConsumer({ bucketClient,
raftSession: 0 });
raftSession: 0 });
done();
}
@ -126,7 +126,7 @@ describe('raft record log client', () => {
describe('error cases', () => {
it('should handle 404 error gracefully', done => {
const logClient = new LogConsumer({ bucketClient,
raftSession: 1 });
raftSession: 1 });
logClient.readRecords({}, (err, info) => {
assert.ifError(err);
assert.deepStrictEqual(info, {
@ -136,7 +136,7 @@ describe('raft record log client', () => {
});
it('should handle 416 error gracefully', done => {
const logClient = new LogConsumer({ bucketClient,
raftSession: 2 });
raftSession: 2 });
logClient.readRecords({}, (err, info) => {
assert.ifError(err);
assert.deepStrictEqual(info, {
@ -146,7 +146,7 @@ describe('raft record log client', () => {
});
it('should handle other errors correctly', done => {
const logClient = new LogConsumer({ bucketClient,
raftSession: 3 });
raftSession: 3 });
logClient.readRecords({}, err => {
assert(err);
assert(err.InternalError);
@ -155,7 +155,7 @@ describe('raft record log client', () => {
});
it('should not crash with malformed log response', done => {
const logClient = new LogConsumer({ bucketClient,
raftSession: 4 });
raftSession: 4 });
logClient.readRecords({}, err => {
assert(err);
assert(err.InternalError);

View File

@ -116,14 +116,14 @@ describe('record log - persistent log of metadata operations', () => {
it('should be able to add records and list them thereafter', done => {
debug('going to append records');
const ops = [{ type: 'put', key: 'foo', value: 'bar',
prefix: ['foobucket'] },
{ type: 'del', key: 'baz',
prefix: ['foobucket'] },
{ type: 'put',
key: 'Pâtisserie=中文-español-English',
value: 'yummy',
prefix: ['foobucket'] },
];
prefix: ['foobucket'] },
{ type: 'del', key: 'baz',
prefix: ['foobucket'] },
{ type: 'put',
key: 'Pâtisserie=中文-español-English',
value: 'yummy',
prefix: ['foobucket'] },
];
logProxy.createLogRecordOps(ops, (err, logEntries) => {
assert.ifError(err);
db.batch(ops.concat(logEntries), err => {
@ -198,7 +198,7 @@ describe('record log - persistent log of metadata operations', () => {
for (let i = 1; i <= 1000; ++i) {
recordsToAdd.push(
{ type: 'put', key: `foo${i}`, value: `bar${i}`,
prefix: ['foobucket'] });
prefix: ['foobucket'] });
}
logProxy.createLogRecordOps(recordsToAdd, (err, logRecs) => {
assert.ifError(err);
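The pattern exercised throughout this file is worth spelling out: createLogRecordOps derives log entries from a batch of metadata operations, and the data writes plus their log records are committed together in one atomic batch. A sketch of that write path (`logProxy`, `db`, and `done` come from this file's setup, which is outside the hunks shown):

// Sketch of the write path: derive log records, then commit data and
// log entries in a single atomic batch.
const ops = [
    { type: 'put', key: 'foo', value: 'bar', prefix: ['foobucket'] },
    { type: 'del', key: 'baz', prefix: ['foobucket'] },
];
logProxy.createLogRecordOps(ops, (err, logEntries) => {
    if (err) {
        return done(err);
    }
    return db.batch(ops.concat(logEntries), done);
});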

View File

@ -163,60 +163,60 @@ describe('test VSP', () => {
};
vsp.put(request, logger, next);
},
(res, next) => {
v1 = Version.from(res).getVersionId();
const request = {
db: 'foo',
key: 'bar',
value: '{"qux":"quz2"}',
options: { versioning: true },
};
vsp.put(request, logger, next);
},
(res, next) => {
v2 = Version.from(res).getVersionId();
(res, next) => {
v1 = Version.from(res).getVersionId();
const request = {
db: 'foo',
key: 'bar',
value: '{"qux":"quz2"}',
options: { versioning: true },
};
vsp.put(request, logger, next);
},
(res, next) => {
v2 = Version.from(res).getVersionId();
// overwriting v1: master should not be updated
const request = {
db: 'foo',
key: 'bar',
value: '{"qux":"quz1.1"}',
options: { versioning: true,
versionId: v1 },
};
vsp.put(request, logger, next);
},
(res, next) => {
const request = {
db: 'foo',
key: 'bar',
};
vsp.get(request, logger, next);
},
(res, next) => {
assert.strictEqual(JSON.parse(res).qux, 'quz2');
const request = {
db: 'foo',
key: 'bar',
value: '{"qux":"quz1.1"}',
options: { versioning: true,
versionId: v1 },
};
vsp.put(request, logger, next);
},
(res, next) => {
const request = {
db: 'foo',
key: 'bar',
};
vsp.get(request, logger, next);
},
(res, next) => {
assert.strictEqual(JSON.parse(res).qux, 'quz2');
// overwriting v2: master should be updated
const request = {
db: 'foo',
key: 'bar',
value: '{"qux":"quz2.1"}',
options: { versioning: true,
versionId: v2 },
};
vsp.put(request, logger, next);
},
(res, next) => {
const request = {
db: 'foo',
key: 'bar',
};
vsp.get(request, logger, next);
},
(res, next) => {
assert.strictEqual(JSON.parse(res).qux, 'quz2.1');
next();
}],
const request = {
db: 'foo',
key: 'bar',
value: '{"qux":"quz2.1"}',
options: { versioning: true,
versionId: v2 },
};
vsp.put(request, logger, next);
},
(res, next) => {
const request = {
db: 'foo',
key: 'bar',
};
vsp.get(request, logger, next);
},
(res, next) => {
assert.strictEqual(JSON.parse(res).qux, 'quz2.1');
next();
}],
done);
});
});