Compare commits

..

No commits in common. "fd8a49e71c2bec70a1eaac77f5a05c31610ad503" and "80b91d724d733247d6eabbb0a93fbf68969a822d" have entirely different histories.

297 changed files with 5936 additions and 8476 deletions

3
.gitignore vendored
View File

@ -13,6 +13,3 @@ node_modules/
# Coverage
coverage/
.nyc_output/
# TypeScript
build/

View File

@ -1,4 +0,0 @@
{
"tabWidth": 4,
"singleQuote": true
}

197
index.js Normal file
View File

@ -0,0 +1,197 @@
module.exports = {
auth: require('./lib/auth/auth'),
constants: require('./lib/constants'),
db: require('./lib/db'),
errors: require('./lib/errors.js'),
errorUtils: require('./lib/errorUtils'),
shuffle: require('./lib/shuffle'),
stringHash: require('./lib/stringHash'),
ipCheck: require('./lib/ipCheck'),
jsutil: require('./lib/jsutil'),
https: {
ciphers: require('./lib/https/ciphers.js'),
dhparam: require('./lib/https/dh2048.js'),
},
algorithms: {
list: require('./lib/algos/list/exportAlgos'),
listTools: {
DelimiterTools: require('./lib/algos/list/tools'),
},
cache: {
LRUCache: require('./lib/algos/cache/LRUCache'),
},
stream: {
MergeStream: require('./lib/algos/stream/MergeStream'),
},
SortedSet: require('./lib/algos/set/SortedSet'),
},
policies: {
evaluators: require('./lib/policyEvaluator/evaluator.js'),
validateUserPolicy: require('./lib/policy/policyValidator')
.validateUserPolicy,
evaluatePrincipal: require('./lib/policyEvaluator/principal'),
RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
requestUtils: require('./lib/policyEvaluator/requestUtils'),
actionMaps: require('./lib/policyEvaluator/utils/actionMaps'),
},
Clustering: require('./lib/Clustering'),
testing: {
matrix: require('./lib/testing/matrix.js'),
},
versioning: {
VersioningConstants: require('./lib/versioning/constants.js')
.VersioningConstants,
Version: require('./lib/versioning/Version.js').Version,
VersionID: require('./lib/versioning/VersionID.js'),
},
network: {
http: {
server: require('./lib/network/http/server'),
},
rpc: require('./lib/network/rpc/rpc'),
level: require('./lib/network/rpc/level-net'),
rest: {
RESTServer: require('./lib/network/rest/RESTServer'),
RESTClient: require('./lib/network/rest/RESTClient'),
},
RoundRobin: require('./lib/network/RoundRobin'),
probe: {
ProbeServer: require('./lib/network/probe/ProbeServer'),
HealthProbeServer:
require('./lib/network/probe/HealthProbeServer.js'),
Utils: require('./lib/network/probe/Utils.js'),
},
kmip: require('./lib/network/kmip'),
kmipClient: require('./lib/network/kmip/Client'),
},
s3routes: {
routes: require('./lib/s3routes/routes'),
routesUtils: require('./lib/s3routes/routesUtils'),
},
s3middleware: {
userMetadata: require('./lib/s3middleware/userMetadata'),
convertToXml: require('./lib/s3middleware/convertToXml'),
escapeForXml: require('./lib/s3middleware/escapeForXml'),
objectLegalHold: require('./lib/s3middleware/objectLegalHold'),
tagging: require('./lib/s3middleware/tagging'),
checkDateModifiedHeaders:
require('./lib/s3middleware/validateConditionalHeaders')
.checkDateModifiedHeaders,
validateConditionalHeaders:
require('./lib/s3middleware/validateConditionalHeaders')
.validateConditionalHeaders,
MD5Sum: require('./lib/s3middleware/MD5Sum'),
NullStream: require('./lib/s3middleware/nullStream'),
objectUtils: require('./lib/s3middleware/objectUtils'),
azureHelper: {
mpuUtils:
require('./lib/s3middleware/azureHelpers/mpuUtils'),
ResultsCollector:
require('./lib/s3middleware/azureHelpers/ResultsCollector'),
SubStreamInterface:
require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
},
prepareStream: require('./lib/s3middleware/prepareStream'),
processMpuParts: require('./lib/s3middleware/processMpuParts'),
retention: require('./lib/s3middleware/objectRetention'),
lifecycleHelpers: require('./lib/s3middleware/lifecycleHelpers'),
},
storage: {
metadata: {
MetadataWrapper: require('./lib/storage/metadata/MetadataWrapper'),
bucketclient: {
BucketClientInterface:
require('./lib/storage/metadata/bucketclient/' +
'BucketClientInterface'),
LogConsumer:
require('./lib/storage/metadata/bucketclient/LogConsumer'),
},
file: {
BucketFileInterface:
require('./lib/storage/metadata/file/BucketFileInterface'),
MetadataFileServer:
require('./lib/storage/metadata/file/MetadataFileServer'),
MetadataFileClient:
require('./lib/storage/metadata/file/MetadataFileClient'),
},
inMemory: {
metastore:
require('./lib/storage/metadata/in_memory/metastore'),
metadata: require('./lib/storage/metadata/in_memory/metadata'),
bucketUtilities:
require('./lib/storage/metadata/in_memory/bucket_utilities'),
},
mongoclient: {
MongoClientInterface:
require('./lib/storage/metadata/mongoclient/' +
'MongoClientInterface'),
LogConsumer:
require('./lib/storage/metadata/mongoclient/LogConsumer'),
},
proxy: {
Server: require('./lib/storage/metadata/proxy/Server'),
},
},
data: {
DataWrapper: require('./lib/storage/data/DataWrapper'),
MultipleBackendGateway:
require('./lib/storage/data/MultipleBackendGateway'),
parseLC: require('./lib/storage/data/LocationConstraintParser'),
file: {
DataFileStore:
require('./lib/storage/data/file/DataFileStore'),
DataFileInterface:
require('./lib/storage/data/file/DataFileInterface'),
},
external: {
AwsClient: require('./lib/storage/data/external/AwsClient'),
AzureClient: require('./lib/storage/data/external/AzureClient'),
GcpClient: require('./lib/storage/data/external/GcpClient'),
GCP: require('./lib/storage/data/external/GCP/GcpService'),
GcpUtils: require('./lib/storage/data/external/GCP/GcpUtils'),
GcpSigner: require('./lib/storage/data/external/GCP/GcpSigner'),
PfsClient: require('./lib/storage/data/external/PfsClient'),
backendUtils: require('./lib/storage/data/external/utils'),
},
inMemory: {
datastore: require('./lib/storage/data/in_memory/datastore'),
},
},
utils: require('./lib/storage/utils'),
},
models: {
BackendInfo: require('./lib/models/BackendInfo'),
BucketInfo: require('./lib/models/BucketInfo'),
BucketAzureInfo: require('./lib/models/BucketAzureInfo'),
ObjectMD: require('./lib/models/ObjectMD'),
ObjectMDLocation: require('./lib/models/ObjectMDLocation'),
ObjectMDAzureInfo: require('./lib/models/ObjectMDAzureInfo'),
ARN: require('./lib/models/ARN'),
WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
ReplicationConfiguration:
require('./lib/models/ReplicationConfiguration'),
LifecycleConfiguration:
require('./lib/models/LifecycleConfiguration'),
LifecycleRule: require('./lib/models/LifecycleRule'),
BucketPolicy: require('./lib/models/BucketPolicy'),
ObjectLockConfiguration:
require('./lib/models/ObjectLockConfiguration'),
NotificationConfiguration:
require('./lib/models/NotificationConfiguration'),
},
metrics: {
StatsClient: require('./lib/metrics/StatsClient'),
StatsModel: require('./lib/metrics/StatsModel'),
RedisClient: require('./lib/metrics/RedisClient'),
ZenkoMetrics: require('./lib/metrics/ZenkoMetrics'),
},
pensieve: {
credentialUtils: require('./lib/executables/pensieveCreds/utils'),
},
stream: {
readJSONStreamObject: require('./lib/stream/readJSONStreamObject'),
},
patches: {
locationConstraints: require('./lib/patches/locationConstraints'),
},
};

192
index.ts
View File

@ -1,192 +0,0 @@
export * as auth from './lib/auth/auth'
export { default as constants } from './lib/constants'
export { default as errors } from './lib/errors'
export { default as shuffle } from './lib/shuffle'
export { default as stringHash } from './lib/stringHash'
export * as db from './lib/db'
export * as errorUtils from './lib/errorUtils'
export * as ipCheck from './lib/ipCheck'
export * as jsutil from './lib/jsutil'
export * as https from './lib/https'
// algorithms: {
// list: require('./lib/algos/list/exportAlgos'),
// listTools: {
// DelimiterTools: require('./lib/algos/list/tools'),
// },
// cache: {
// LRUCache: require('./lib/algos/cache/LRUCache'),
// },
// stream: {
// MergeStream: require('./lib/algos/stream/MergeStream'),
// },
// SortedSet: require('./lib/algos/set/SortedSet'),
// },
// policies: {
// evaluators: require('./lib/policyEvaluator/evaluator.js'),
// validateUserPolicy: require('./lib/policy/policyValidator')
// .validateUserPolicy,
// evaluatePrincipal: require('./lib/policyEvaluator/principal'),
// RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
// requestUtils: require('./lib/policyEvaluator/requestUtils'),
// actionMaps: require('./lib/policyEvaluator/utils/actionMaps'),
// },
// Clustering: require('./lib/Clustering'),
// testing: {
// matrix: require('./lib/testing/matrix.js'),
// },
// versioning: {
// VersioningConstants: require('./lib/versioning/constants.js')
// .VersioningConstants,
// Version: require('./lib/versioning/Version.js').Version,
// VersionID: require('./lib/versioning/VersionID.js'),
// },
// network: {
// http: {
// server: require('./lib/network/http/server'),
// },
// rpc: require('./lib/network/rpc/rpc'),
// level: require('./lib/network/rpc/level-net'),
// rest: {
// RESTServer: require('./lib/network/rest/RESTServer'),
// RESTClient: require('./lib/network/rest/RESTClient'),
// },
// RoundRobin: require('./lib/network/RoundRobin'),
// probe: {
// ProbeServer: require('./lib/network/probe/ProbeServer'),
// HealthProbeServer:
// require('./lib/network/probe/HealthProbeServer.js'),
// Utils: require('./lib/network/probe/Utils.js'),
// },
// kmip: require('./lib/network/kmip'),
// kmipClient: require('./lib/network/kmip/Client'),
// },
// s3routes: {
// routes: require('./lib/s3routes/routes'),
// routesUtils: require('./lib/s3routes/routesUtils'),
// },
// s3middleware: {
// userMetadata: require('./lib/s3middleware/userMetadata'),
// convertToXml: require('./lib/s3middleware/convertToXml'),
// escapeForXml: require('./lib/s3middleware/escapeForXml'),
// objectLegalHold: require('./lib/s3middleware/objectLegalHold'),
// tagging: require('./lib/s3middleware/tagging'),
// checkDateModifiedHeaders:
// require('./lib/s3middleware/validateConditionalHeaders')
// .checkDateModifiedHeaders,
// validateConditionalHeaders:
// require('./lib/s3middleware/validateConditionalHeaders')
// .validateConditionalHeaders,
// MD5Sum: require('./lib/s3middleware/MD5Sum'),
// NullStream: require('./lib/s3middleware/nullStream'),
// objectUtils: require('./lib/s3middleware/objectUtils'),
// azureHelper: {
// mpuUtils:
// require('./lib/s3middleware/azureHelpers/mpuUtils'),
// ResultsCollector:
// require('./lib/s3middleware/azureHelpers/ResultsCollector'),
// SubStreamInterface:
// require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
// },
// prepareStream: require('./lib/s3middleware/prepareStream'),
// processMpuParts: require('./lib/s3middleware/processMpuParts'),
// retention: require('./lib/s3middleware/objectRetention'),
// lifecycleHelpers: require('./lib/s3middleware/lifecycleHelpers'),
// },
// storage: {
// metadata: {
// MetadataWrapper: require('./lib/storage/metadata/MetadataWrapper'),
// bucketclient: {
// BucketClientInterface:
// require('./lib/storage/metadata/bucketclient/' +
// 'BucketClientInterface'),
// LogConsumer:
// require('./lib/storage/metadata/bucketclient/LogConsumer'),
// },
// file: {
// BucketFileInterface:
// require('./lib/storage/metadata/file/BucketFileInterface'),
// MetadataFileServer:
// require('./lib/storage/metadata/file/MetadataFileServer'),
// MetadataFileClient:
// require('./lib/storage/metadata/file/MetadataFileClient'),
// },
// inMemory: {
// metastore:
// require('./lib/storage/metadata/in_memory/metastore'),
// metadata: require('./lib/storage/metadata/in_memory/metadata'),
// bucketUtilities:
// require('./lib/storage/metadata/in_memory/bucket_utilities'),
// },
// mongoclient: {
// MongoClientInterface:
// require('./lib/storage/metadata/mongoclient/' +
// 'MongoClientInterface'),
// LogConsumer:
// require('./lib/storage/metadata/mongoclient/LogConsumer'),
// },
// proxy: {
// Server: require('./lib/storage/metadata/proxy/Server'),
// },
// },
// data: {
// DataWrapper: require('./lib/storage/data/DataWrapper'),
// MultipleBackendGateway:
// require('./lib/storage/data/MultipleBackendGateway'),
// parseLC: require('./lib/storage/data/LocationConstraintParser'),
// file: {
// DataFileStore:
// require('./lib/storage/data/file/DataFileStore'),
// DataFileInterface:
// require('./lib/storage/data/file/DataFileInterface'),
// },
// external: {
// AwsClient: require('./lib/storage/data/external/AwsClient'),
// AzureClient: require('./lib/storage/data/external/AzureClient'),
// GcpClient: require('./lib/storage/data/external/GcpClient'),
// GCP: require('./lib/storage/data/external/GCP/GcpService'),
// GcpUtils: require('./lib/storage/data/external/GCP/GcpUtils'),
// GcpSigner: require('./lib/storage/data/external/GCP/GcpSigner'),
// PfsClient: require('./lib/storage/data/external/PfsClient'),
// backendUtils: require('./lib/storage/data/external/utils'),
// },
// inMemory: {
// datastore: require('./lib/storage/data/in_memory/datastore'),
// },
// },
// utils: require('./lib/storage/utils'),
// },
// models: {
// BackendInfo: require('./lib/models/BackendInfo'),
// BucketInfo: require('./lib/models/BucketInfo'),
// BucketAzureInfo: require('./lib/models/BucketAzureInfo'),
// ObjectMD: require('./lib/models/ObjectMD'),
// ObjectMDLocation: require('./lib/models/ObjectMDLocation'),
// ObjectMDAzureInfo: require('./lib/models/ObjectMDAzureInfo'),
// ARN: require('./lib/models/ARN'),
// WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
// ReplicationConfiguration:
// require('./lib/models/ReplicationConfiguration'),
// LifecycleConfiguration:
// require('./lib/models/LifecycleConfiguration'),
// LifecycleRule: require('./lib/models/LifecycleRule'),
// BucketPolicy: require('./lib/models/BucketPolicy'),
// ObjectLockConfiguration:
// require('./lib/models/ObjectLockConfiguration'),
// NotificationConfiguration:
// require('./lib/models/NotificationConfiguration'),
// },
// metrics: {
// StatsClient: require('./lib/metrics/StatsClient'),
// StatsModel: require('./lib/metrics/StatsModel'),
// RedisClient: require('./lib/metrics/RedisClient'),
// ZenkoMetrics: require('./lib/metrics/ZenkoMetrics'),
// },
// pensieve: {
// credentialUtils: require('./lib/executables/pensieveCreds/utils'),
// },
// stream: {
// readJSONStreamObject: require('./lib/stream/readJSONStreamObject'),
// },
// patches: {
// locationConstraints: require('./lib/patches/locationConstraints'),
// },

View File

@ -1,6 +1,8 @@
import * as cluster from 'cluster';
'use strict'; // eslint-disable-line
export default class Clustering {
const cluster = require('cluster');
class Clustering {
/**
* Constructor
*
@ -257,3 +259,5 @@ export default class Clustering {
});
}
}
module.exports = Clustering;

View File

@ -1,4 +1,4 @@
import assert from 'assert';
const assert = require('assert');
/**
* @class
@ -6,19 +6,13 @@ import assert from 'assert';
* number of items and a Least Recently Used (LRU) strategy for
* eviction.
*/
export default class LRUCache {
_maxEntries;
_entryMap;
_entryCount;
_lruTail;
_lruHead;
class LRUCache {
/**
* @constructor
* @param maxEntries - maximum number of entries kept in
* @param {number} maxEntries - maximum number of entries kept in
* the cache
*/
constructor(maxEntries: number) {
constructor(maxEntries) {
assert(maxEntries >= 1);
this._maxEntries = maxEntries;
this.clear();
@ -28,12 +22,12 @@ export default class LRUCache {
* Add or update the value associated to a key in the cache,
* making it the most recently accessed for eviction purpose.
*
* @param key - key to add
* @param value - associated value (can be of any type)
* @return true if the cache contained an entry with
* @param {string} key - key to add
* @param {object} value - associated value (can be of any type)
* @return {boolean} true if the cache contained an entry with
* this key, false if it did not
*/
add(key: string, value): boolean {
add(key, value) {
let entry = this._entryMap[key];
if (entry) {
entry.value = value;
@ -60,12 +54,12 @@ export default class LRUCache {
* Get the value associated to a key in the cache, making it the
* most recently accessed for eviction purpose.
*
* @param key - key of which to fetch the associated value
* @return returns the associated value if
* @param {string} key - key of which to fetch the associated value
* @return {object|undefined} - returns the associated value if
* exists in the cache, or undefined if not found - either if the
* key was never added or if it has been evicted from the cache.
*/
get(key: string) {
get(key) {
const entry = this._entryMap[key];
if (entry) {
// make the entry the most recently used by re-pushing it
@ -80,12 +74,12 @@ export default class LRUCache {
/**
* Remove an entry from the cache if exists
*
* @param key - key to remove
* @return true if an entry has been removed, false if
* @param {string} key - key to remove
* @return {boolean} true if an entry has been removed, false if
* there was no entry with this key in the cache - either if the
* key was never added or if it has been evicted from the cache.
*/
remove(key: string): boolean {
remove(key) {
const entry = this._entryMap[key];
if (entry) {
this._removeEntry(entry);
@ -97,14 +91,16 @@ export default class LRUCache {
/**
* Get the current number of cached entries
*
* @return current number of cached entries
* @return {number} current number of cached entries
*/
count(): number {
count() {
return this._entryCount;
}
/**
* Remove all entries from the cache
*
* @return {undefined}
*/
clear() {
this._entryMap = {};
@ -117,7 +113,8 @@ export default class LRUCache {
* Push an entry to the front of the LRU list, making it the most
* recently accessed
*
* @param entry - entry to push
* @param {object} entry - entry to push
* @return {undefined}
*/
_lruPushEntry(entry) {
/* eslint-disable no-param-reassign */
@ -136,7 +133,8 @@ export default class LRUCache {
/**
* Remove an entry from the LRU list
*
* @param entry - entry to remove
* @param {object} entry - entry to remove
* @return {undefined}
*/
_lruRemoveEntry(entry) {
/* eslint-disable no-param-reassign */
@ -156,7 +154,8 @@ export default class LRUCache {
/**
* Helper function to remove an existing entry from the cache
*
* @param entry - cache entry to remove
* @param {object} entry - cache entry to remove
* @return {undefined}
*/
_removeEntry(entry) {
this._lruRemoveEntry(entry);
@ -164,3 +163,5 @@ export default class LRUCache {
this._entryCount -= 1;
}
}
module.exports = LRUCache;

View File

@ -1,4 +1,7 @@
import { FILTER_SKIP, SKIP_NONE } from './tools';
'use strict'; // eslint-disable-line strict
const { FILTER_SKIP, SKIP_NONE } = require('./tools');
// Use a heuristic to amortize the cost of JSON
// serialization/deserialization only on largest metadata where the
// potential for size reduction is high, considering the bulk of the
@ -19,12 +22,7 @@ const TRIM_METADATA_MIN_BLOB_SIZE = 10000;
/**
* Base class of listing extensions.
*/
export default class Extension {
parameters;
logger;
res?: any[];
keys: number;
class Extension {
/**
* This takes a list of parameters and a logger as the inputs.
* Derivatives should have their own format regarding parameters.
@ -53,14 +51,14 @@ export default class Extension {
* heavy unused fields, or left untouched (depending on size
* heuristics)
*/
trimMetadata(value: string): string {
let ret: any = undefined;
trimMetadata(value) {
let ret = undefined;
if (value.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
try {
ret = JSON.parse(value);
delete ret.location;
ret = JSON.stringify(ret);
} catch (e: any) {
} catch (e) {
// Prefer returning an unfiltered data rather than
// stopping the service in case of parsing failure.
// The risk of this approach is a potential
@ -68,8 +66,7 @@ export default class Extension {
// used by repd.
this.logger.warn(
'Could not parse Object Metadata while listing',
{ err: e.toString() }
);
{ err: e.toString() });
}
}
return ret || value;
@ -99,7 +96,7 @@ export default class Extension {
* = 0: entry is accepted but not included (skipping)
* < 0: entry is not accepted, listing should finish
*/
filter(entry): number {
filter(entry) {
return entry ? FILTER_SKIP : FILTER_SKIP;
}
@ -108,18 +105,20 @@ export default class Extension {
* because it is skipping a range of delimited keys or a range of specific
* version when doing master version listing.
*
* @return the insight: a common prefix or a master key,
* @return {string} - the insight: a common prefix or a master key,
* or SKIP_NONE if there is no insight
*/
skipping(): string | undefined {
skipping() {
return SKIP_NONE;
}
/**
* Get the listing results. Format depends on derivatives' specific logic.
* @return The listed elements
* @return {Array} - The listed elements
*/
result() {
return this.res;
}
}
module.exports.default = Extension;

View File

@ -1,12 +1,9 @@
import {
inc,
checkLimit,
listingParamsMasterKeysV0ToV1,
FILTER_END,
FILTER_ACCEPT,
} from './tools';
'use strict'; // eslint-disable-line strict
const { inc, checkLimit, listingParamsMasterKeysV0ToV1,
FILTER_END, FILTER_ACCEPT } = require('./tools');
const DEFAULT_MAX_KEYS = 1000;
import { VersioningConstants as VSConst } from '../../versioning/constants';
const VSConst = require('../../versioning/constants').VersioningConstants;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
function numberDefault(num, defaultNum) {
@ -17,22 +14,7 @@ function numberDefault(num, defaultNum) {
/**
* Class for the MultipartUploads extension
*/
export class MultipartUploads {
params
vFormat
CommonPrefixes
Uploads
IsTruncated
NextKeyMarker
NextUploadIdMarker
prefixLength
queryPrefixLength
keys
maxKeys
delimiter
splitter
logger
class MultipartUploads {
/**
* Constructor of the extension
* Init and check parameters
@ -57,26 +39,22 @@ export class MultipartUploads {
this.splitter = params.splitter;
this.logger = logger;
Object.assign(
this,
{
[BucketVersioningKeyFormat.v0]: {
genMDParams: this.genMDParamsV0,
getObjectKey: this.getObjectKeyV0,
},
[BucketVersioningKeyFormat.v1]: {
genMDParams: this.genMDParamsV1,
getObjectKey: this.getObjectKeyV1,
},
}[this.vFormat]
);
Object.assign(this, {
[BucketVersioningKeyFormat.v0]: {
genMDParams: this.genMDParamsV0,
getObjectKey: this.getObjectKeyV0,
},
[BucketVersioningKeyFormat.v1]: {
genMDParams: this.genMDParamsV1,
getObjectKey: this.getObjectKeyV1,
},
}[this.vFormat]);
}
genMDParamsV0() {
const params = {};
if (this.params.keyMarker) {
params.gt =
`overview${this.params.splitter}` +
params.gt = `overview${this.params.splitter}` +
`${this.params.keyMarker}${this.params.splitter}`;
if (this.params.uploadIdMarker) {
params.gt += `${this.params.uploadIdMarker}`;
@ -169,20 +147,14 @@ export class MultipartUploads {
if (this.delimiter) {
const mpuPrefixSlice = `overview${this.splitter}`.length;
const mpuKey = key.slice(mpuPrefixSlice);
const commonPrefixIndex = mpuKey.indexOf(
this.delimiter,
this.queryPrefixLength
);
const commonPrefixIndex = mpuKey.indexOf(this.delimiter,
this.queryPrefixLength);
if (commonPrefixIndex === -1) {
this.addUpload(value);
} else {
this.addCommonPrefix(
mpuKey.substring(
0,
commonPrefixIndex + this.delimiter.length
)
);
this.addCommonPrefix(mpuKey.substring(0,
commonPrefixIndex + this.delimiter.length));
}
} else {
this.addUpload(value);
@ -210,3 +182,7 @@ export class MultipartUploads {
};
}
}
module.exports = {
MultipartUploads,
};

View File

@ -1,17 +1,14 @@
import Extension from './Extension';
import { checkLimit, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } from './tools';
'use strict'; // eslint-disable-line strict
const Extension = require('./Extension').default;
const { checkLimit, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
const DEFAULT_MAX_KEYS = 10000;
/**
* Class of an extension doing the simple listing
*/
export class List extends Extension {
maxKeys: number;
filterKey;
filterKeyStartsWith;
res: any[];
class List extends Extension {
/**
* Constructor
* Set the logger and the res
@ -33,17 +30,15 @@ export class List extends Extension {
}
genMDParams() {
const params = this.parameters
? {
gt: this.parameters.gt,
gte: this.parameters.gte || this.parameters.start,
lt: this.parameters.lt,
lte: this.parameters.lte || this.parameters.end,
keys: this.parameters.keys,
values: this.parameters.values,
}
: {};
Object.keys(params).forEach((key) => {
const params = this.parameters ? {
gt: this.parameters.gt,
gte: this.parameters.gte || this.parameters.start,
lt: this.parameters.lt,
lte: this.parameters.lte || this.parameters.end,
keys: this.parameters.keys,
values: this.parameters.values,
} : {};
Object.keys(params).forEach(key => {
if (params[key] === null || params[key] === undefined) {
delete params[key];
}
@ -58,30 +53,29 @@ export class List extends Extension {
*
* @return {Boolean} Returns true if matches, else false.
*/
customFilter(value: string): boolean {
let _value: any;
customFilter(value) {
let _value;
try {
_value = JSON.parse(value);
} catch (e: any) {
} catch (e) {
// Prefer returning an unfiltered data rather than
// stopping the service in case of parsing failure.
// The risk of this approach is a potential
// reproduction of MD-692, where too much memory is
// used by repd.
this.logger.warn('Could not parse Object Metadata while listing', {
err: e.toString(),
});
this.logger.warn(
'Could not parse Object Metadata while listing',
{ err: e.toString() });
return false;
}
if (_value.customAttributes !== undefined) {
for (const key of Object.keys(_value.customAttributes)) {
if (this.filterKey !== undefined && key === this.filterKey) {
if (this.filterKey !== undefined &&
key === this.filterKey) {
return true;
}
if (
this.filterKeyStartsWith !== undefined &&
key.startsWith(this.filterKeyStartsWith)
) {
if (this.filterKeyStartsWith !== undefined &&
key.startsWith(this.filterKeyStartsWith)) {
return true;
}
}
@ -96,17 +90,15 @@ export class List extends Extension {
* @return {number} - > 0 : continue listing
* < 0 : listing done
*/
filter(elem): number {
filter(elem) {
// Check first in case of maxkeys <= 0
if (this.keys >= this.maxKeys) {
return FILTER_END;
}
if (
(this.filterKey !== undefined ||
this.filterKeyStartsWith !== undefined) &&
if ((this.filterKey !== undefined ||
this.filterKeyStartsWith !== undefined) &&
typeof elem === 'object' &&
!this.customFilter(elem.value)
) {
!this.customFilter(elem.value)) {
return FILTER_SKIP;
}
if (typeof elem === 'object') {
@ -129,3 +121,7 @@ export class List extends Extension {
return this.res;
}
}
module.exports = {
List,
};

View File

@ -1,12 +1,9 @@
import Extension from './Extension';
import {
inc,
listingParamsMasterKeysV0ToV1,
FILTER_END,
FILTER_ACCEPT,
FILTER_SKIP,
} from './tools';
import { VersioningConstants as VSConst } from '../../versioning/constants';
'use strict'; // eslint-disable-line strict
const Extension = require('./Extension').default;
const { inc, listingParamsMasterKeysV0ToV1,
FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
const VSConst = require('../../versioning/constants').VersioningConstants;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
/**
@ -17,11 +14,7 @@ const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
* @param {Number} delimiterIndex - 'folder' index in the path
* @return {String} - CommonPrefix
*/
function getCommonPrefix(
key: string,
delimiter: string,
delimiterIndex: number
): string {
function getCommonPrefix(key, delimiter, delimiterIndex) {
return key.substring(0, delimiterIndex + delimiter.length);
}
@ -37,25 +30,7 @@ function getCommonPrefix(
* @prop {String|undefined} prefix - prefix per amazon format
* @prop {Number} maxKeys - number of keys to list
*/
export class Delimiter extends Extension {
CommonPrefixes: string[];
Contents: string[];
IsTruncated: boolean;
NextMarker?: string;
keys: number;
delimiter?: string;
prefix?: string;
maxKeys: number;
marker;
startAfter;
continuationToken;
alphabeticalOrder;
vFormat;
NextContinuationToken;
startMarker;
continueMarker;
nextContinueMarker;
class Delimiter extends Extension {
/**
* Create a new Delimiter instance
* @constructor
@ -83,7 +58,6 @@ export class Delimiter extends Extension {
constructor(parameters, logger, vFormat) {
super(parameters, logger);
// original listing parameters
this.keys = 0;
this.delimiter = parameters.delimiter;
this.prefix = parameters.prefix;
this.marker = parameters.marker;
@ -91,9 +65,8 @@ export class Delimiter extends Extension {
this.startAfter = parameters.startAfter;
this.continuationToken = parameters.continuationToken;
this.alphabeticalOrder =
typeof parameters.alphabeticalOrder !== 'undefined'
? parameters.alphabeticalOrder
: true;
typeof parameters.alphabeticalOrder !== 'undefined' ?
parameters.alphabeticalOrder : true;
this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
// results
@ -106,44 +79,36 @@ export class Delimiter extends Extension {
this.startMarker = parameters.v2 ? 'startAfter' : 'marker';
this.continueMarker = parameters.v2 ? 'continuationToken' : 'marker';
this.nextContinueMarker = parameters.v2
? 'NextContinuationToken'
: 'NextMarker';
this.nextContinueMarker = parameters.v2 ?
'NextContinuationToken' : 'NextMarker';
if (
this.delimiter !== undefined &&
if (this.delimiter !== undefined &&
this[this.nextContinueMarker] !== undefined &&
this[this.nextContinueMarker].startsWith(this.prefix || '')
) {
const nextDelimiterIndex = this[this.nextContinueMarker].indexOf(
this.delimiter,
this.prefix ? this.prefix.length : 0
);
this[this.nextContinueMarker] = this[this.nextContinueMarker].slice(
0,
nextDelimiterIndex + this.delimiter.length
);
this[this.nextContinueMarker].startsWith(this.prefix || '')) {
const nextDelimiterIndex =
this[this.nextContinueMarker].indexOf(this.delimiter,
this.prefix ? this.prefix.length : 0);
this[this.nextContinueMarker] =
this[this.nextContinueMarker].slice(0, nextDelimiterIndex +
this.delimiter.length);
}
Object.assign(
this,
{
[BucketVersioningKeyFormat.v0]: {
genMDParams: this.genMDParamsV0,
getObjectKey: this.getObjectKeyV0,
skipping: this.skippingV0,
},
[BucketVersioningKeyFormat.v1]: {
genMDParams: this.genMDParamsV1,
getObjectKey: this.getObjectKeyV1,
skipping: this.skippingV1,
},
}[this.vFormat]
);
Object.assign(this, {
[BucketVersioningKeyFormat.v0]: {
genMDParams: this.genMDParamsV0,
getObjectKey: this.getObjectKeyV0,
skipping: this.skippingV0,
},
[BucketVersioningKeyFormat.v1]: {
genMDParams: this.genMDParamsV1,
getObjectKey: this.getObjectKeyV1,
skipping: this.skippingV1,
},
}[this.vFormat]);
}
genMDParamsV0() {
const params: { gte?: string; lt?: string; gt?: string } = {};
const params = {};
if (this.prefix) {
params.gte = this.prefix;
params.lt = inc(this.prefix);
@ -169,7 +134,7 @@ export class Delimiter extends Extension {
* final state of the result if it is the case
* @return {Boolean} - indicates if the iteration has to stop
*/
_reachedMaxKeys(): boolean {
_reachedMaxKeys() {
if (this.keys >= this.maxKeys) {
// In cases of maxKeys <= 0 -> IsTruncated = false
this.IsTruncated = this.maxKeys > 0;
@ -186,7 +151,7 @@ export class Delimiter extends Extension {
* @param {String} value - The value of the key
* @return {number} - indicates if iteration should continue
*/
addContents(key: string, value: string): number {
addContents(key, value) {
if (this._reachedMaxKeys()) {
return FILTER_END;
}
@ -196,11 +161,11 @@ export class Delimiter extends Extension {
return FILTER_ACCEPT;
}
getObjectKeyV0(obj: { key: string }) {
getObjectKeyV0(obj) {
return obj.key;
}
getObjectKeyV1(obj: { key: string }) {
getObjectKeyV1(obj) {
return obj.key.slice(DbPrefixes.Master.length);
}
@ -215,15 +180,13 @@ export class Delimiter extends Extension {
* @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue
*/
filter(obj: { key: string; value: string }): number {
filter(obj) {
const key = this.getObjectKey(obj);
const value = obj.value;
if (
(this.prefix && !key.startsWith(this.prefix)) ||
(this.alphabeticalOrder &&
typeof this[this.nextContinueMarker] === 'string' &&
key <= this[this.nextContinueMarker])
) {
if ((this.prefix && !key.startsWith(this.prefix))
|| (this.alphabeticalOrder
&& typeof this[this.nextContinueMarker] === 'string'
&& key <= this[this.nextContinueMarker])) {
return FILTER_SKIP;
}
if (this.delimiter) {
@ -243,12 +206,10 @@ export class Delimiter extends Extension {
* @param {Number} index - after prefix starting point
* @return {Boolean} - indicates if iteration should continue
*/
addCommonPrefix(key: string, index: number): boolean {
addCommonPrefix(key, index) {
const commonPrefix = getCommonPrefix(key, this.delimiter, index);
if (
this.CommonPrefixes.indexOf(commonPrefix) === -1 &&
this[this.nextContinueMarker] !== commonPrefix
) {
if (this.CommonPrefixes.indexOf(commonPrefix) === -1
&& this[this.nextContinueMarker] !== commonPrefix) {
if (this._reachedMaxKeys()) {
return FILTER_END;
}
@ -267,7 +228,7 @@ export class Delimiter extends Extension {
* @return {string} - the present range (NextMarker) if repd believes
* that it's enough and should move on
*/
skippingV0(): string {
skippingV0() {
return this[this.nextContinueMarker];
}
@ -278,7 +239,7 @@ export class Delimiter extends Extension {
* @return {string} - the present range (NextMarker) if repd believes
* that it's enough and should move on
*/
skippingV1(): string {
skippingV1() {
return DbPrefixes.Master + this[this.nextContinueMarker];
}
@ -300,17 +261,14 @@ export class Delimiter extends Extension {
Delimiter: this.delimiter,
};
if (this.parameters.v2) {
//
result.NextContinuationToken = this.IsTruncated
? this.NextContinuationToken
: undefined;
? this.NextContinuationToken : undefined;
} else {
//
result.NextMarker =
this.IsTruncated && this.delimiter
? this.NextMarker
: undefined;
result.NextMarker = (this.IsTruncated && this.delimiter)
? this.NextMarker : undefined;
}
return result;
}
}
module.exports = { Delimiter };

View File

@ -1,8 +1,10 @@
import { Delimiter } from './delimiter';
import { Version } from '../../versioning/Version';
import { VersioningConstants as VSConst } from '../../versioning/constants';
'use strict'; // eslint-disable-line strict
const Delimiter = require('./delimiter').Delimiter;
const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { BucketVersioningKeyFormat } = VSConst;
import { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } from './tools';
const { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools');
const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes } = VSConst;
@ -11,11 +13,7 @@ const { DbPrefixes } = VSConst;
* Handle object listing with parameters. This extends the base class Delimiter
* to return the raw master versions of existing objects.
*/
export class DelimiterMaster extends Delimiter {
prvKey;
prvPHDKey;
inReplayPrefix;
class DelimiterMaster extends Delimiter {
/**
* Delimiter listing of master versions.
* @param {Object} parameters - listing parameters
@ -36,19 +34,16 @@ export class DelimiterMaster extends Delimiter {
this.prvPHDKey = undefined;
this.inReplayPrefix = false;
Object.assign(
this,
{
[BucketVersioningKeyFormat.v0]: {
filter: this.filterV0,
skipping: this.skippingV0,
},
[BucketVersioningKeyFormat.v1]: {
filter: this.filterV1,
skipping: this.skippingV1,
},
}[this.vFormat]
);
Object.assign(this, {
[BucketVersioningKeyFormat.v0]: {
filter: this.filterV0,
skipping: this.skippingV0,
},
[BucketVersioningKeyFormat.v1]: {
filter: this.filterV1,
skipping: this.skippingV1,
},
}[this.vFormat]);
}
/**
@ -63,7 +58,7 @@ export class DelimiterMaster extends Delimiter {
* @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue
*/
filterV0(obj: { key: string; value: string }): number {
filterV0(obj) {
let key = obj.key;
const value = obj.value;
@ -75,11 +70,9 @@ export class DelimiterMaster extends Delimiter {
/* Skip keys not starting with the prefix or not alphabetically
* ordered. */
if (
(this.prefix && !key.startsWith(this.prefix)) ||
(typeof this[this.nextContinueMarker] === 'string' &&
key <= this[this.nextContinueMarker])
) {
if ((this.prefix && !key.startsWith(this.prefix))
|| (typeof this[this.nextContinueMarker] === 'string' &&
key <= this[this.nextContinueMarker])) {
return FILTER_SKIP;
}
@ -102,12 +95,9 @@ export class DelimiterMaster extends Delimiter {
* NextMarker to the common prefix instead of the whole key
* value. (TODO: remove this test once ZENKO-1048 is fixed)
* */
if (
key === this.prvKey ||
key === this[this.nextContinueMarker] ||
if (key === this.prvKey || key === this[this.nextContinueMarker] ||
(this.delimiter &&
key.startsWith(this[this.nextContinueMarker]))
) {
key.startsWith(this[this.nextContinueMarker]))) {
/* master version already filtered */
return FILTER_SKIP;
}
@ -165,7 +155,7 @@ export class DelimiterMaster extends Delimiter {
* @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue
*/
filterV1(obj: { key: string; value: string }): number {
filterV1(obj) {
// Filtering master keys in v1 is simply listing the master
// keys, as the state of version keys do not change the
// result, so we can use Delimiter method directly.
@ -177,9 +167,8 @@ export class DelimiterMaster extends Delimiter {
// next marker or next continuation token:
// - foo/ : skipping foo/
// - foo : skipping foo.
const index = this[this.nextContinueMarker].lastIndexOf(
this.delimiter
);
const index = this[this.nextContinueMarker].
lastIndexOf(this.delimiter);
if (index === this[this.nextContinueMarker].length - 1) {
return this[this.nextContinueMarker];
}
@ -203,3 +192,5 @@ export class DelimiterMaster extends Delimiter {
return DbPrefixes.Master + skipTo;
}
}
module.exports = { DelimiterMaster };

View File

@ -1,13 +1,10 @@
import { Delimiter } from './delimiter';
import { Version } from '../../versioning/Version';
import { VersioningConstants as VSConst } from '../../versioning/constants';
import {
inc,
FILTER_END,
FILTER_ACCEPT,
FILTER_SKIP,
SKIP_NONE,
} from './tools';
'use strict'; // eslint-disable-line strict
const Delimiter = require('./delimiter').Delimiter;
const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } =
require('./tools');
const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
@ -24,16 +21,7 @@ const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
* @prop {String|undefined} prefix - prefix per amazon format
* @prop {Number} maxKeys - number of keys to list
*/
export class DelimiterVersions extends Delimiter {
CommonPrefixes: string[];
Contents: string[];
IsTruncated: boolean;
NextMarker?: string;
keys: number;
delimiter?: string;
prefix?: string;
maxKeys: number;
class DelimiterVersions extends Delimiter {
constructor(parameters, logger, vFormat) {
super(parameters, logger, vFormat);
// specific to version listing
@ -47,21 +35,18 @@ export class DelimiterVersions extends Delimiter {
this.NextVersionIdMarker = undefined;
this.inReplayPrefix = false;
Object.assign(
this,
{
[BucketVersioningKeyFormat.v0]: {
genMDParams: this.genMDParamsV0,
filter: this.filterV0,
skipping: this.skippingV0,
},
[BucketVersioningKeyFormat.v1]: {
genMDParams: this.genMDParamsV1,
filter: this.filterV1,
skipping: this.skippingV1,
},
}[this.vFormat]
);
Object.assign(this, {
[BucketVersioningKeyFormat.v0]: {
genMDParams: this.genMDParamsV0,
filter: this.filterV0,
skipping: this.skippingV0,
},
[BucketVersioningKeyFormat.v1]: {
genMDParams: this.genMDParamsV1,
filter: this.filterV1,
skipping: this.skippingV1,
},
}[this.vFormat]);
}
genMDParamsV0() {
@ -78,10 +63,9 @@ export class DelimiterVersions extends Delimiter {
if (this.parameters.versionIdMarker) {
// versionIdMarker should always come with keyMarker
// but may not be the other way around
params.gt =
this.parameters.keyMarker +
VID_SEP +
this.parameters.versionIdMarker;
params.gt = this.parameters.keyMarker
+ VID_SEP
+ this.parameters.versionIdMarker;
} else {
params.gt = inc(this.parameters.keyMarker + VID_SEP);
}
@ -105,27 +89,20 @@ export class DelimiterVersions extends Delimiter {
params[1].lt = inc(DbPrefixes.Version); // stop after the last version key
}
if (this.parameters.keyMarker) {
if (
params[1].gte <=
DbPrefixes.Version + this.parameters.keyMarker
) {
if (params[1].gte <= DbPrefixes.Version + this.parameters.keyMarker) {
delete params[0].gte;
delete params[1].gte;
params[0].gt =
DbPrefixes.Master +
inc(this.parameters.keyMarker + VID_SEP);
params[0].gt = DbPrefixes.Master + inc(this.parameters.keyMarker + VID_SEP);
if (this.parameters.versionIdMarker) {
// versionIdMarker should always come with keyMarker
// but may not be the other way around
params[1].gt =
DbPrefixes.Version +
this.parameters.keyMarker +
VID_SEP +
this.parameters.versionIdMarker;
params[1].gt = DbPrefixes.Version
+ this.parameters.keyMarker
+ VID_SEP
+ this.parameters.versionIdMarker;
} else {
params[1].gt =
DbPrefixes.Version +
inc(this.parameters.keyMarker + VID_SEP);
params[1].gt = DbPrefixes.Version
+ inc(this.parameters.keyMarker + VID_SEP);
}
}
}
@ -143,7 +120,7 @@ export class DelimiterVersions extends Delimiter {
* * -1 if master key < version key
* * 1 if master key > version key
*/
compareObjects(masterObj, versionObj): number {
compareObjects(masterObj, versionObj) {
const masterKey = masterObj.key.slice(DbPrefixes.Master.length);
const versionKey = versionObj.key.slice(DbPrefixes.Version.length);
return masterKey < versionKey ? -1 : 1;
@ -159,11 +136,7 @@ export class DelimiterVersions extends Delimiter {
* @param {String} obj.value - The value of the key
* @return {Boolean} - indicates if iteration should continue
*/
addContents(obj: {
key: string;
versionId: string;
value: string;
}): boolean {
addContents(obj) {
if (this._reachedMaxKeys()) {
return FILTER_END;
}
@ -190,7 +163,7 @@ export class DelimiterVersions extends Delimiter {
* @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue
*/
filterV0(obj: { key: string; value: string }): number {
filterV0(obj) {
if (obj.key.startsWith(DbPrefixes.Replay)) {
this.inReplayPrefix = true;
return FILTER_SKIP;
@ -216,14 +189,12 @@ export class DelimiterVersions extends Delimiter {
* @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue
*/
filterV1(obj: { key: string; value: string }): number {
filterV1(obj) {
// this function receives both M and V keys, but their prefix
// length is the same so we can remove their prefix without
// looking at the type of key
return this.filterCommon(
obj.key.slice(DbPrefixes.Master.length),
obj.value
);
return this.filterCommon(obj.key.slice(DbPrefixes.Master.length),
obj.value);
}
filterCommon(key, value) {
@ -236,16 +207,14 @@ export class DelimiterVersions extends Delimiter {
if (versionIdIndex < 0) {
nonversionedKey = key;
this.masterKey = key;
this.masterVersionId = Version.from(value).getVersionId() || 'null';
this.masterVersionId =
Version.from(value).getVersionId() || 'null';
versionId = this.masterVersionId;
} else {
nonversionedKey = key.slice(0, versionIdIndex);
versionId = key.slice(versionIdIndex + 1);
// skip a version key if it is the master version
if (
this.masterKey === nonversionedKey &&
this.masterVersionId === versionId
) {
if (this.masterKey === nonversionedKey && this.masterVersionId === versionId) {
return FILTER_SKIP;
}
this.masterKey = undefined;
@ -253,10 +222,7 @@ export class DelimiterVersions extends Delimiter {
}
if (this.delimiter) {
const baseIndex = this.prefix ? this.prefix.length : 0;
const delimiterIndex = nonversionedKey.indexOf(
this.delimiter,
baseIndex
);
const delimiterIndex = nonversionedKey.indexOf(this.delimiter, baseIndex);
if (delimiterIndex >= 0) {
return this.addCommonPrefix(nonversionedKey, delimiterIndex);
}
@ -283,7 +249,8 @@ export class DelimiterVersions extends Delimiter {
return SKIP_NONE;
}
// skip to the same object key in both M and V range listings
return [DbPrefixes.Master + skipV0, DbPrefixes.Version + skipV0];
return [DbPrefixes.Master + skipV0,
DbPrefixes.Version + skipV0];
}
/**
@ -302,10 +269,11 @@ export class DelimiterVersions extends Delimiter {
Versions: this.Contents,
IsTruncated: this.IsTruncated,
NextKeyMarker: this.IsTruncated ? this.NextMarker : undefined,
NextVersionIdMarker: this.IsTruncated
? this.NextVersionIdMarker
: undefined,
NextVersionIdMarker: this.IsTruncated ?
this.NextVersionIdMarker : undefined,
Delimiter: this.delimiter,
};
}
}
module.exports = { DelimiterVersions };

View File

@ -1,25 +1,21 @@
import assert from 'assert';
import { FILTER_END, FILTER_SKIP, SKIP_NONE } from './tools';
const assert = require('assert');
const { FILTER_END, FILTER_SKIP, SKIP_NONE } = require('./tools');
const MAX_STREAK_LENGTH = 100;
/**
* Handle the filtering and the skip mechanism of a listing result.
*/
export class Skip {
extension;
gteParams;
listingEndCb;
skipRangeCb;
streakLength;
class Skip {
/**
* @param {Object} params - skip parameters
* @param {Object} params.extension - delimiter extension used (required)
* @param {String} params.gte - current range gte (greater than or
* equal) used by the client code
*/
constructor(params: { extension: any; gte: string }) {
constructor(params) {
assert(params.extension);
this.extension = params.extension;
@ -51,7 +47,7 @@ export class Skip {
* This function calls the listing end or the skip range callbacks if
* needed.
*/
filter(entry): void {
filter(entry) {
assert(this.listingEndCb);
assert(this.skipRangeCb);
@ -60,10 +56,8 @@ export class Skip {
if (filteringResult === FILTER_END) {
this.listingEndCb();
} else if (
filteringResult === FILTER_SKIP &&
skippingRange !== SKIP_NONE
) {
} else if (filteringResult === FILTER_SKIP
&& skippingRange !== SKIP_NONE) {
if (++this.streakLength >= MAX_STREAK_LENGTH) {
const newRange = this._inc(skippingRange);
@ -79,7 +73,7 @@ export class Skip {
}
}
_inc(str: string) {
_inc(str) {
if (!str) {
return str;
}
@ -89,3 +83,6 @@ export class Skip {
return `${str.slice(0, str.length - 1)}${lastCharNewValue}`;
}
}
module.exports = Skip;

View File

@ -1,11 +1,10 @@
import { VersioningConstants as VSConst } from '../../versioning/constants';
const { DbPrefixes } = VSConst
const { DbPrefixes } = require('../../versioning/constants').VersioningConstants;
// constants for extensions
export const SKIP_NONE = undefined; // to be inline with the values of NextMarker
export const FILTER_ACCEPT = 1;
export const FILTER_SKIP = 0;
export const FILTER_END = -1;
const SKIP_NONE = undefined; // to be inline with the values of NextMarker
const FILTER_ACCEPT = 1;
const FILTER_SKIP = 0;
const FILTER_END = -1;
/**
* This function check if number is valid
@ -16,8 +15,8 @@ export const FILTER_END = -1;
* @param {Number} limit - The limit to respect
* @return {Number} - The parsed number || limit
*/
export function checkLimit(number: number, limit: number): number {
const parsed = Number.parseInt(number, 10)
function checkLimit(number, limit) {
const parsed = Number.parseInt(number, 10);
const valid = !Number.isNaN(parsed) && (!limit || parsed <= limit);
return valid ? parsed : limit;
}
@ -29,7 +28,7 @@ export function checkLimit(number: number, limit: number): number {
* @return {string} - the incremented string
* or the input if it is not valid
*/
export function inc(str: string): string {
function inc(str) {
return str ? (str.slice(0, str.length - 1) +
String.fromCharCode(str.charCodeAt(str.length - 1) + 1)) : str;
}
@ -41,7 +40,7 @@ export function inc(str: string): string {
* @param {object} v0params - listing parameters for v0 format
* @return {object} - listing parameters for v1 format
*/
export function listingParamsMasterKeysV0ToV1(v0params: any): any {
function listingParamsMasterKeysV0ToV1(v0params) {
const v1params = Object.assign({}, v0params);
if (v0params.gt !== undefined) {
v1params.gt = `${DbPrefixes.Master}${v0params.gt}`;
@ -59,3 +58,13 @@ export function listingParamsMasterKeysV0ToV1(v0params: any): any {
}
return v1params;
}
module.exports = {
checkLimit,
inc,
listingParamsMasterKeysV0ToV1,
SKIP_NONE,
FILTER_END,
FILTER_SKIP,
FILTER_ACCEPT,
};

View File

@ -1,4 +1,4 @@
export function indexOf<T>(arr: T[], value: T) {
function indexOf(arr, value) {
if (!arr.length) {
return -1;
}
@ -22,10 +22,10 @@ export function indexOf<T>(arr: T[], value: T) {
return -1;
}
export function indexAtOrBelow<T>(arr: T[], value: T) {
let i: number;
let lo: number;
let hi: number;
function indexAtOrBelow(arr, value) {
let i;
let lo;
let hi;
if (!arr.length || arr[0] > value) {
return -1;
@ -52,7 +52,7 @@ export function indexAtOrBelow<T>(arr: T[], value: T) {
/*
* perform symmetric diff in O(m + n)
*/
export function symDiff(k1, k2, v1, v2, cb) {
function symDiff(k1, k2, v1, v2, cb) {
let i = 0;
let j = 0;
const n = k1.length;
@ -79,3 +79,9 @@ export function symDiff(k1, k2, v1, v2, cb) {
j++;
}
}
module.exports = {
indexOf,
indexAtOrBelow,
symDiff,
};

View File

@ -1,12 +1,14 @@
import * as ArrayUtils from './ArrayUtils';
const ArrayUtils = require('./ArrayUtils');
export default class SortedSet<Key, Value> {
keys: Key[];
values: Value[];
class SortedSet {
constructor(obj?: { keys: Key[]; values: Value[] }) {
this.keys = obj?.keys ?? [];
this.values = obj?.values ?? [];
constructor(obj) {
if (obj) {
this.keys = obj.keys;
this.values = obj.values;
} else {
this.clear();
}
}
clear() {
@ -18,7 +20,7 @@ export default class SortedSet<Key, Value> {
return this.keys.length;
}
set(key: Key, value: Value) {
set(key, value) {
const index = ArrayUtils.indexAtOrBelow(this.keys, key);
if (this.keys[index] === key) {
this.values[index] = value;
@ -28,17 +30,17 @@ export default class SortedSet<Key, Value> {
this.values.splice(index + 1, 0, value);
}
isSet(key: Key) {
isSet(key) {
const index = ArrayUtils.indexOf(this.keys, key);
return index >= 0;
}
get(key: Key) {
get(key) {
const index = ArrayUtils.indexOf(this.keys, key);
return index >= 0 ? this.values[index] : undefined;
}
del(key: Key) {
del(key) {
const index = ArrayUtils.indexOf(this.keys, key);
if (index >= 0) {
this.keys.splice(index, 1);
@ -46,3 +48,5 @@ export default class SortedSet<Key, Value> {
}
}
}
module.exports = SortedSet;

View File

@ -1,17 +1,7 @@
import stream from 'stream';
const stream = require('stream');
export default class MergeStream extends stream.Readable {
_compare: (a: any, b: any) => number;
_streams: [stream.Readable, stream.Readable];
_peekItems: [undefined | null, undefined | null];
_streamEof: [boolean, boolean];
_streamToResume: stream.Readable | null;
constructor(
stream1: stream.Readable,
stream2: stream.Readable,
compare: (a: any, b: any) => number
) {
class MergeStream extends stream.Readable {
constructor(stream1, stream2, compare) {
super({ objectMode: true });
this._compare = compare;
@ -26,13 +16,13 @@ export default class MergeStream extends stream.Readable {
this._streamEof = [false, false];
this._streamToResume = null;
stream1.on('data', (item) => this._onItem(stream1, item, 0, 1));
stream1.on('data', item => this._onItem(stream1, item, 0, 1));
stream1.once('end', () => this._onEnd(stream1, 0, 1));
stream1.once('error', (err) => this._onError(stream1, err, 0, 1));
stream1.once('error', err => this._onError(stream1, err, 0, 1));
stream2.on('data', (item) => this._onItem(stream2, item, 1, 0));
stream2.on('data', item => this._onItem(stream2, item, 1, 0));
stream2.once('end', () => this._onEnd(stream2, 1, 0));
stream2.once('error', (err) => this._onError(stream2, err, 1, 0));
stream2.once('error', err => this._onError(stream2, err, 1, 0));
}
_read() {
@ -51,7 +41,7 @@ export default class MergeStream extends stream.Readable {
callback();
}
_onItem(myStream: stream.Readable, myItem, myIndex, otherIndex) {
_onItem(myStream, myItem, myIndex, otherIndex) {
this._peekItems[myIndex] = myItem;
const otherItem = this._peekItems[otherIndex];
if (otherItem === undefined) {
@ -79,7 +69,7 @@ export default class MergeStream extends stream.Readable {
return undefined;
}
_onEnd(myStream: stream.Readable, myIndex, otherIndex) {
_onEnd(myStream, myIndex, otherIndex) {
this._streamEof[myIndex] = true;
if (this._peekItems[myIndex] === undefined) {
this._peekItems[myIndex] = null;
@ -104,7 +94,7 @@ export default class MergeStream extends stream.Readable {
return otherStream.resume();
}
_onError(myStream: stream.Readable, err, myIndex, otherIndex) {
_onError(myStream, err, myIndex, otherIndex) {
myStream.destroy();
if (this._streams[otherIndex]) {
this._streams[otherIndex].destroy();
@ -112,3 +102,5 @@ export default class MergeStream extends stream.Readable {
this.emit('error', err);
}
}
module.exports = MergeStream;

View File

@ -1,4 +1,6 @@
import * as constants from '../constants';
'use strict'; // eslint-disable-line strict
const constants = require('../constants');
/**
* Class containing requester's information received from Vault
@ -7,14 +9,7 @@ import * as constants from '../constants';
* @return {AuthInfo} an AuthInfo instance
*/
export default class AuthInfo {
arn
canonicalID
shortid
email
accountDisplayName
IAMdisplayName
class AuthInfo {
constructor(objectFromVault) {
// amazon resource name for IAM user (if applicable)
this.arn = objectFromVault.arn;
@ -55,12 +50,13 @@ export default class AuthInfo {
return this.canonicalID === constants.publicId;
}
isRequesterAServiceAccount() {
return this.canonicalID.startsWith(`${constants.zenkoServiceAccount}/`);
return this.canonicalID.startsWith(
`${constants.zenkoServiceAccount}/`);
}
isRequesterThisServiceAccount(serviceName) {
return (
this.canonicalID ===
`${constants.zenkoServiceAccount}/${serviceName}`
);
return this.canonicalID ===
`${constants.zenkoServiceAccount}/${serviceName}`;
}
}
module.exports = AuthInfo;

View File

@ -1,5 +1,5 @@
import errors from '../errors';
import AuthInfo from './AuthInfo';
const errors = require('../errors');
const AuthInfo = require('./AuthInfo');
/** vaultSignatureCb parses message from Vault and instantiates
* @param {object} err - error from vault
@ -39,10 +39,7 @@ function vaultSignatureCb(err, authInfo, log, callback, streamingV4Params) {
* authentication backends.
* @class Vault
*/
export default class Vault {
client
implName
class Vault {
/**
* @constructor
* @param {object} client - authentication backend or vault client
@ -94,7 +91,7 @@ export default class Vault {
requestContext: serializedRCsArr,
},
(err, userInfo) => vaultSignatureCb(err, userInfo,
params.log, callback),
params.log, callback)
);
}
@ -149,7 +146,7 @@ export default class Vault {
requestContext: serializedRCs,
},
(err, userInfo) => vaultSignatureCb(err, userInfo,
params.log, callback, streamingV4Params),
params.log, callback, streamingV4Params)
);
}
@ -235,28 +232,28 @@ export default class Vault {
*/
getAccountIds(canonicalIDs, log, callback) {
log.trace('getting accountIds from Vault based on canonicalIDs',
{ canonicalIDs });
{ canonicalIDs });
this.client.getAccountIds(canonicalIDs,
{ reqUid: log.getSerializedUids() },
(err, info) => {
if (err) {
log.debug('received error message from vault',
{ errorMessage: err });
return callback(err);
}
const infoFromVault = info.message.body;
log.trace('info received from vault', { infoFromVault });
const result = {};
/* If the accountId was not found in Vault, do not
{ reqUid: log.getSerializedUids() },
(err, info) => {
if (err) {
log.debug('received error message from vault',
{ errorMessage: err });
return callback(err);
}
const infoFromVault = info.message.body;
log.trace('info received from vault', { infoFromVault });
const result = {};
/* If the accountId was not found in Vault, do not
send the canonicalID back to the API */
Object.keys(infoFromVault).forEach(key => {
if (infoFromVault[key] !== 'NotFound' &&
Object.keys(infoFromVault).forEach(key => {
if (infoFromVault[key] !== 'NotFound' &&
infoFromVault[key] !== 'WrongFormat') {
result[key] = infoFromVault[key];
}
});
return callback(null, result);
result[key] = infoFromVault[key];
}
});
return callback(null, result);
});
}
/** checkPolicies -- call Vault to evaluate policies
@ -315,3 +312,5 @@ export default class Vault {
});
}
}
module.exports = Vault;

View File

@ -1,20 +1,22 @@
import * as crypto from 'crypto';
import errors from '../errors';
import * as queryString from 'querystring';
import AuthInfo from './AuthInfo';
import * as v2 from './v2/authV2';
import * as v4 from './v4/authV4';
import * as constants from '../constants';
import constructStringToSignV2 from './v2/constructStringToSign';
import constructStringToSignV4 from './v4/constructStringToSign';
import { convertUTCtoISO8601 } from './v4/timeUtils';
import * as vaultUtilities from './backends/in_memory/vaultUtilities';
import * as inMemoryBackend from './backends/in_memory/Backend';
import validateAuthConfig from './backends/in_memory/validateAuthConfig';
import AuthLoader from './backends/in_memory/AuthLoader';
import Vault from './Vault';
import baseBackend from './backends/base';
import chainBackend from './backends/ChainBackend';
'use strict'; // eslint-disable-line strict
const crypto = require('crypto');
const errors = require('../errors');
const queryString = require('querystring');
const AuthInfo = require('./AuthInfo');
const v2 = require('./v2/authV2');
const v4 = require('./v4/authV4');
const constants = require('../constants');
const constructStringToSignV2 = require('./v2/constructStringToSign');
const constructStringToSignV4 = require('./v4/constructStringToSign');
const convertUTCtoISO8601 = require('./v4/timeUtils').convertUTCtoISO8601;
const vaultUtilities = require('./backends/in_memory/vaultUtilities');
const inMemoryBackend = require('./backends/in_memory/Backend');
const validateAuthConfig = require('./backends/in_memory/validateAuthConfig');
const AuthLoader = require('./backends/in_memory/AuthLoader');
const Vault = require('./Vault');
const baseBackend = require('./backends/base');
const chainBackend = require('./backends/ChainBackend');
let vault = null;
const auth = {};
@ -71,9 +73,8 @@ function extractParams(request, log, awsService, data) {
} else if (authHeader.startsWith('AWS4')) {
version = 'v4';
} else {
log.trace('invalid authorization security header', {
header: authHeader,
});
log.trace('invalid authorization security header',
{ header: authHeader });
return { err: errors.AccessDenied };
}
} else if (data.Signature) {
@ -87,10 +88,8 @@ function extractParams(request, log, awsService, data) {
// Here, either both values are set, or none is set
if (version !== null && method !== null) {
if (!checkFunctions[version] || !checkFunctions[version][method]) {
log.trace('invalid auth version or method', {
version,
authMethod: method,
});
log.trace('invalid auth version or method',
{ version, authMethod: method });
return { err: errors.NotImplemented };
}
log.trace('identified auth method', { version, authMethod: method });
@ -122,11 +121,10 @@ function doAuth(request, log, cb, awsService, requestContexts) {
return cb(null, res.params);
}
if (requestContexts) {
requestContexts.forEach((requestContext) => {
requestContexts.forEach(requestContext => {
requestContext.setAuthType(res.params.data.authType);
requestContext.setSignatureVersion(
res.params.data.signatureVersion
);
requestContext.setSignatureVersion(res.params
.data.signatureVersion);
requestContext.setSignatureAge(res.params.data.signatureAge);
requestContext.setSecurityToken(res.params.data.securityToken);
});
@ -138,12 +136,8 @@ function doAuth(request, log, cb, awsService, requestContexts) {
return vault.authenticateV2Request(res.params, requestContexts, cb);
}
if (res.params.version === 4) {
return vault.authenticateV4Request(
res.params,
requestContexts,
cb,
awsService
);
return vault.authenticateV4Request(res.params, requestContexts, cb,
awsService);
}
log.error('authentication method not found', {
@ -166,22 +160,16 @@ function doAuth(request, log, cb, awsService, requestContexts) {
* are temporary credentials from STS
* @return {undefined}
*/
function generateV4Headers(
request,
data,
accessKey,
secretKeyValue,
awsService,
proxyPath,
sessionToken
) {
function generateV4Headers(request, data, accessKey, secretKeyValue,
awsService, proxyPath, sessionToken) {
Object.assign(request, { headers: {} });
const amzDate = convertUTCtoISO8601(Date.now());
// get date without time
const scopeDate = amzDate.slice(0, amzDate.indexOf('T'));
const region = 'us-east-1';
const service = awsService || 'iam';
const credentialScope = `${scopeDate}/${region}/${service}/aws4_request`;
const credentialScope =
`${scopeDate}/${region}/${service}/aws4_request`;
const timestamp = amzDate;
const algorithm = 'AWS4-HMAC-SHA256';
@ -191,10 +179,8 @@ function generateV4Headers(
encodeURIComponent,
});
}
const payloadChecksum = crypto
.createHash('sha256')
.update(payload, 'binary')
.digest('hex');
const payloadChecksum = crypto.createHash('sha256')
.update(payload, 'binary').digest('hex');
request.setHeader('host', request._headers.host);
request.setHeader('x-amz-date', amzDate);
request.setHeader('x-amz-content-sha256', payloadChecksum);
@ -205,49 +191,47 @@ function generateV4Headers(
Object.assign(request.headers, request._headers);
const signedHeaders = Object.keys(request._headers)
.filter(
(headerName) =>
headerName.startsWith('x-amz-') ||
headerName.startsWith('x-scal-') ||
headerName === 'host'
)
.sort()
.join(';');
const params = {
request,
signedHeaders,
payloadChecksum,
credentialScope,
timestamp,
query: data,
awsService: service,
proxyPath,
};
.filter(headerName =>
headerName.startsWith('x-amz-')
|| headerName.startsWith('x-scal-')
|| headerName === 'host'
).sort().join(';');
const params = { request, signedHeaders, payloadChecksum,
credentialScope, timestamp, query: data,
awsService: service, proxyPath };
const stringToSign = constructStringToSignV4(params);
const signingKey = vaultUtilities.calculateSigningKey(
secretKeyValue,
region,
scopeDate,
service
);
const signature = crypto
.createHmac('sha256', signingKey)
.update(stringToSign, 'binary')
.digest('hex');
const authorizationHeader =
`${algorithm} Credential=${accessKey}` +
const signingKey = vaultUtilities.calculateSigningKey(secretKeyValue,
region,
scopeDate,
service);
const signature = crypto.createHmac('sha256', signingKey)
.update(stringToSign, 'binary').digest('hex');
const authorizationHeader = `${algorithm} Credential=${accessKey}` +
`/${credentialScope}, SignedHeaders=${signedHeaders}, ` +
`Signature=${signature}`;
request.setHeader('authorization', authorizationHeader);
Object.assign(request, { headers: {} });
}
export const server = { extractParams, doAuth };
export const client = { generateV4Headers, constructStringToSignV2 };
export const inMemory = {
backend: inMemoryBackend,
validateAuthConfig,
AuthLoader,
module.exports = {
setHandler: setAuthHandler,
server: {
extractParams,
doAuth,
},
client: {
generateV4Headers,
constructStringToSignV2,
},
inMemory: {
backend: inMemoryBackend,
validateAuthConfig,
AuthLoader,
},
backends: {
baseBackend,
chainBackend,
},
AuthInfo,
Vault,
};
export const backends = { baseBackend, chainBackend };
export { setAuthHandler as setHandler, AuthInfo, Vault };

View File

@ -1,8 +1,10 @@
import assert from 'assert';
import async from 'async';
'use strict'; // eslint-disable-line strict
import errors from '../../errors';
import BaseBackend from './base';
const assert = require('assert');
const async = require('async');
const errors = require('../../errors');
const BaseBackend = require('./base');
/**
* Class that provides an authentication backend that will verify signatures
@ -11,15 +13,13 @@ import BaseBackend from './base';
*
* @class ChainBackend
*/
export default class ChainBackend extends BaseBackend {
_clients: any[];
class ChainBackend extends BaseBackend {
/**
* @constructor
* @param {string} service - service id
* @param {object[]} clients - list of authentication backends or vault clients
*/
constructor(service: string, clients: any[]) {
constructor(service, clients) {
super(service);
assert(Array.isArray(clients) && clients.length > 0, 'invalid client list');
@ -34,6 +34,7 @@ export default class ChainBackend extends BaseBackend {
this._clients = clients;
}
/*
* try task against each client for one to be successful
*/
@ -61,20 +62,19 @@ export default class ChainBackend extends BaseBackend {
verifySignatureV4(stringToSign, signatureFromRequest, accessKey, region, scopeDate, options, callback) {
this._tryEachClient((client, done) => client.verifySignatureV4(
stringToSign,
signatureFromRequest,
accessKey,
region,
scopeDate,
options,
done
signatureFromRequest,
accessKey,
region,
scopeDate,
options,
done
), callback);
}
static _mergeObjects(objectResponses) {
return objectResponses.reduce(
(retObj, resObj) => Object.assign(retObj, resObj.message.body),
{}
);
{});
}
getCanonicalIds(emailAddresses, options, callback) {
@ -90,8 +90,7 @@ export default class ChainBackend extends BaseBackend {
body: ChainBackend._mergeObjects(res),
},
});
}
);
});
}
getEmailAddresses(canonicalIDs, options, callback) {
@ -106,8 +105,7 @@ export default class ChainBackend extends BaseBackend {
body: ChainBackend._mergeObjects(res),
},
});
}
);
});
}
/*
@ -130,7 +128,7 @@ export default class ChainBackend extends BaseBackend {
});
});
return Object.keys(policyMap).map((key) => {
return Object.keys(policyMap).map(key => {
const policyRes = { isAllowed: policyMap[key].isAllowed };
if (policyMap[key].arn !== '') {
policyRes.arn = policyMap[key].arn;
@ -174,12 +172,12 @@ export default class ChainBackend extends BaseBackend {
error: !!err ? err : null,
status: res,
})
), (err, res) => {
), (err, res) => {
if (err) {
return callback(err);
}
const isError = res.some((results) => !!results.error);
const isError = res.some(results => !!results.error);
if (isError) {
return callback(errors.InternalError, res);
}
@ -187,3 +185,5 @@ export default class ChainBackend extends BaseBackend {
});
}
}
module.exports = ChainBackend;

View File

@ -1,13 +1,13 @@
import errors from '../../errors';
'use strict'; // eslint-disable-line strict
const errors = require('../../errors');
/**
* Base backend class
*
* @class BaseBackend
*/
export default class BaseBackend {
service
class BaseBackend {
/**
* @constructor
* @param {string} service - service identifer for construction arn
@ -82,3 +82,5 @@ export default class BaseBackend {
return callback(null, { code: 200, message: 'OK' });
}
}
module.exports = BaseBackend;

View File

@ -1,9 +1,9 @@
import * as fs from 'fs';
import glob from 'simple-glob';
import joi from '@hapi/joi';
import werelogs from 'werelogs';
const fs = require('fs');
const glob = require('simple-glob');
const joi = require('@hapi/joi');
const werelogs = require('werelogs');
import ARN from '../../../models/ARN';
const ARN = require('../../../models/ARN');
/**
* Load authentication information from files or pre-loaded account
@ -11,46 +11,35 @@ import ARN from '../../../models/ARN';
*
* @class AuthLoader
*/
export default class AuthLoader {
_log
_authData
_isValid
_joiKeysValidator
_joiValidator
class AuthLoader {
constructor(logApi) {
this._log = new (logApi || werelogs).Logger('S3');
this._authData = { accounts: [] };
// null: unknown validity, true/false: valid or invalid
this._isValid = null;
this._joiKeysValidator = joi
.array()
this._joiKeysValidator = joi.array()
.items({
access: joi.string().required(),
secret: joi.string().required(),
})
.required();
const accountsJoi = joi
.array()
.items({
name: joi.string().required(),
email: joi.string().email().required(),
arn: joi.string().required(),
canonicalID: joi.string().required(),
shortid: joi
.string()
.regex(/^[0-9]{12}$/)
.required(),
keys: this._joiKeysValidator,
// backward-compat
users: joi.array(),
})
.required()
.unique('arn')
.unique('email')
.unique('canonicalID');
const accountsJoi = joi.array()
.items({
name: joi.string().required(),
email: joi.string().email().required(),
arn: joi.string().required(),
canonicalID: joi.string().required(),
shortid: joi.string().regex(/^[0-9]{12}$/).required(),
keys: this._joiKeysValidator,
// backward-compat
users: joi.array(),
})
.required()
.unique('arn')
.unique('email')
.unique('canonicalID');
this._joiValidator = joi.object({ accounts: accountsJoi });
}
@ -75,12 +64,11 @@ export default class AuthLoader {
* logging purpose
* @return {undefined}
*/
addAccounts(authData, filePath: string) {
addAccounts(authData, filePath) {
const isValid = this._validateData(authData, filePath);
if (isValid) {
this._authData.accounts = this._authData.accounts.concat(
authData.accounts
);
this._authData.accounts =
this._authData.accounts.concat(authData.accounts);
// defer validity checking when getting data to avoid
// logging multiple times the errors (we need to validate
// all accounts at once to detect duplicate values)
@ -99,7 +87,7 @@ export default class AuthLoader {
* authentication info (see {@link addAccounts()} for format)
* @return {undefined}
*/
addFile(filePath: string) {
addFile(filePath) {
const authData = JSON.parse(fs.readFileSync(filePath));
this.addAccounts(authData, filePath);
}
@ -115,9 +103,9 @@ export default class AuthLoader {
* {@link addAccounts()} for JSON format.
* @return {undefined}
*/
addFilesByGlob(globPattern: string | string[]) {
addFilesByGlob(globPattern) {
const files = glob(globPattern);
files.forEach((filePath) => this.addFile(filePath));
files.forEach(filePath => this.addFile(filePath));
}
/**
@ -146,10 +134,9 @@ export default class AuthLoader {
return this.validate() ? this._authData : null;
}
_validateData(authData, filePath?: string) {
const res = joi.validate(authData, this._joiValidator, {
abortEarly: false,
});
_validateData(authData, filePath) {
const res = joi.validate(authData, this._joiValidator,
{ abortEarly: false });
if (res.error) {
this._dumpJoiErrors(res.error.details, filePath);
return false;
@ -157,23 +144,19 @@ export default class AuthLoader {
let allKeys = [];
let arnError = false;
const validatedAuth = res.value;
validatedAuth.accounts.forEach((account) => {
validatedAuth.accounts.forEach(account => {
// backward-compat: ignore arn if starts with 'aws:' and log a
// warning
if (account.arn.startsWith('aws:')) {
this._log.error(
'account must have a valid AWS ARN, legacy examples ' +
"starting with 'aws:' are not supported anymore. " +
'starting with \'aws:\' are not supported anymore. ' +
'Please convert to a proper account entry (see ' +
'examples at https://github.com/scality/S3/blob/' +
'master/conf/authdata.json). Also note that support ' +
'for account users has been dropped.',
{
accountName: account.name,
accountArn: account.arn,
filePath,
}
);
{ accountName: account.name, accountArn: account.arn,
filePath });
arnError = true;
return;
}
@ -183,33 +166,27 @@ export default class AuthLoader {
'turning users into account entries (see examples at ' +
'https://github.com/scality/S3/blob/master/conf/' +
'authdata.json)',
{
accountName: account.name,
accountArn: account.arn,
filePath,
}
);
{ accountName: account.name, accountArn: account.arn,
filePath });
arnError = true;
return;
}
const arnObj = ARN.createFromString(account.arn);
if (arnObj.error) {
this._log.error('authentication config validation error', {
reason: arnObj.error.description,
accountName: account.name,
accountArn: account.arn,
filePath,
});
this._log.error(
'authentication config validation error',
{ reason: arnObj.error.description,
accountName: account.name, accountArn: account.arn,
filePath });
arnError = true;
return;
}
if (!arnObj.isIAMAccount()) {
this._log.error('authentication config validation error', {
reason: 'not an IAM account ARN',
accountName: account.name,
accountArn: account.arn,
filePath,
});
this._log.error(
'authentication config validation error',
{ reason: 'not an IAM account ARN',
accountName: account.name, accountArn: account.arn,
filePath });
arnError = true;
return;
}
@ -219,9 +196,7 @@ export default class AuthLoader {
return false;
}
const uniqueKeysRes = joi.validate(
allKeys,
this._joiKeysValidator.unique('access')
);
allKeys, this._joiKeysValidator.unique('access'));
if (uniqueKeysRes.error) {
this._dumpJoiErrors(uniqueKeysRes.error.details, filePath);
return false;
@ -230,7 +205,7 @@ export default class AuthLoader {
}
_dumpJoiErrors(errors, filePath) {
errors.forEach((err) => {
errors.forEach(err => {
const logInfo = { item: err.path, filePath };
if (err.type === 'array.unique') {
logInfo.reason = `duplicate value '${err.context.path}'`;
@ -239,7 +214,10 @@ export default class AuthLoader {
logInfo.reason = err.message;
logInfo.context = err.context;
}
this._log.error('authentication config validation error', logInfo);
this._log.error('authentication config validation error',
logInfo);
});
}
}
module.exports = AuthLoader;

View File

@ -1,8 +1,12 @@
import * as crypto from 'crypto';
import errors from '../../../errors';
import { calculateSigningKey, hashSignature } from './vaultUtilities';
import Indexer from './Indexer';
import BaseBackend from '../base';
'use strict'; // eslint-disable-line strict
const crypto = require('crypto');
const errors = require('../../../errors');
const calculateSigningKey = require('./vaultUtilities').calculateSigningKey;
const hashSignature = require('./vaultUtilities').hashSignature;
const Indexer = require('./Indexer');
const BaseBackend = require('../base');
function _formatResponse(userInfoToSend) {
return {
@ -19,9 +23,6 @@ function _formatResponse(userInfoToSend) {
* @class InMemoryBackend
*/
class InMemoryBackend extends BaseBackend {
indexer
formatResponse
/**
* @constructor
* @param {string} service - service identifer for construction arn
@ -35,23 +36,15 @@ class InMemoryBackend extends BaseBackend {
this.formatResponse = formatter;
}
verifySignatureV2(
stringToSign,
signatureFromRequest,
accessKey,
options,
callback
) {
verifySignatureV2(stringToSign, signatureFromRequest,
accessKey, options, callback) {
const entity = this.indexer.getEntityByKey(accessKey);
if (!entity) {
return callback(errors.InvalidAccessKeyId);
}
const secretKey = this.indexer.getSecretKey(entity, accessKey);
const reconstructedSig = hashSignature(
stringToSign,
secretKey,
options.algo
);
const reconstructedSig =
hashSignature(stringToSign, secretKey, options.algo);
if (signatureFromRequest !== reconstructedSig) {
return callback(errors.SignatureDoesNotMatch);
}
@ -65,25 +58,16 @@ class InMemoryBackend extends BaseBackend {
return callback(null, vaultReturnObject);
}
verifySignatureV4(
stringToSign,
signatureFromRequest,
accessKey,
region,
scopeDate,
options,
callback
) {
verifySignatureV4(stringToSign, signatureFromRequest, accessKey,
region, scopeDate, options, callback) {
const entity = this.indexer.getEntityByKey(accessKey);
if (!entity) {
return callback(errors.InvalidAccessKeyId);
}
const secretKey = this.indexer.getSecretKey(entity, accessKey);
const signingKey = calculateSigningKey(secretKey, region, scopeDate);
const reconstructedSig = crypto
.createHmac('sha256', signingKey)
.update(stringToSign, 'binary')
.digest('hex');
const reconstructedSig = crypto.createHmac('sha256', signingKey)
.update(stringToSign, 'binary').digest('hex');
if (signatureFromRequest !== reconstructedSig) {
return callback(errors.SignatureDoesNotMatch);
}
@ -99,13 +83,14 @@ class InMemoryBackend extends BaseBackend {
getCanonicalIds(emails, log, cb) {
const results = {};
emails.forEach((email) => {
emails.forEach(email => {
const lowercasedEmail = email.toLowerCase();
const entity = this.indexer.getEntityByEmail(lowercasedEmail);
if (!entity) {
results[email] = 'NotFound';
} else {
results[email] = entity.canonicalID;
results[email] =
entity.canonicalID;
}
});
const vaultReturnObject = {
@ -118,7 +103,7 @@ class InMemoryBackend extends BaseBackend {
getEmailAddresses(canonicalIDs, options, cb) {
const results = {};
canonicalIDs.forEach((canonicalId) => {
canonicalIDs.forEach(canonicalId => {
const foundEntity = this.indexer.getEntityByCanId(canonicalId);
if (!foundEntity || !foundEntity.email) {
results[canonicalId] = 'NotFound';
@ -146,7 +131,7 @@ class InMemoryBackend extends BaseBackend {
*/
getAccountIds(canonicalIDs, options, cb) {
const results = {};
canonicalIDs.forEach((canonicalID) => {
canonicalIDs.forEach(canonicalID => {
const foundEntity = this.indexer.getEntityByCanId(canonicalID);
if (!foundEntity || !foundEntity.shortid) {
results[canonicalID] = 'Not Found';
@ -163,6 +148,7 @@ class InMemoryBackend extends BaseBackend {
}
}
class S3AuthBackend extends InMemoryBackend {
/**
* @constructor

View File

@ -6,7 +6,7 @@
*
* @class Indexer
*/
export default class Indexer {
class Indexer {
/**
* @constructor
* @param {object} authdata - the authentication config file's data
@ -141,3 +141,5 @@ export default class Indexer {
return entity.accountDisplayName;
}
}
module.exports = Indexer;

View File

@ -1,4 +1,4 @@
import AuthLoader from './AuthLoader';
const AuthLoader = require('./AuthLoader');
/**
* @deprecated please use {@link AuthLoader} class instead
@ -9,8 +9,10 @@ import AuthLoader from './AuthLoader';
* @return {boolean} true on erroneous data
* false on success
*/
export default function validateAuthConfig(authdata, logApi) {
function validateAuthConfig(authdata, logApi) {
const authLoader = new AuthLoader(logApi);
authLoader.addAccounts(authdata);
return !authLoader.validate();
}
module.exports = validateAuthConfig;

View File

@ -1,4 +1,6 @@
import * as crypto from 'crypto';
'use strict'; // eslint-disable-line strict
const crypto = require('crypto');
/** hashSignature for v2 Auth
* @param {string} stringToSign - built string to sign per AWS rules
@ -6,11 +8,7 @@ import * as crypto from 'crypto';
* @param {string} algorithm - either SHA256 or SHA1
* @return {string} reconstructed signature
*/
export function hashSignature(
stringToSign: string,
secretKey: string,
algorithm: 'SHA256' | 'SHA1'
): string {
function hashSignature(stringToSign, secretKey, algorithm) {
const hmacObject = crypto.createHmac(algorithm, secretKey);
return hmacObject.update(stringToSign, 'binary').digest('base64');
}
@ -22,12 +20,7 @@ export function hashSignature(
* @param {string} [service] - To specify another service than s3
* @return {string} signingKey - signingKey to calculate signature
*/
export function calculateSigningKey(
secretKey: string,
region: string,
scopeDate: string,
service: string
): string {
function calculateSigningKey(secretKey, region, scopeDate, service) {
const dateKey = crypto.createHmac('sha256', `AWS4${secretKey}`)
.update(scopeDate, 'binary').digest();
const dateRegionKey = crypto.createHmac('sha256', dateKey)
@ -38,3 +31,5 @@ export function calculateSigningKey(
.update('aws4_request', 'binary').digest();
return signingKey;
}
module.exports = { hashSignature, calculateSigningKey };

View File

@ -1,4 +1,6 @@
export default function algoCheck(signatureLength) {
'use strict'; // eslint-disable-line strict
function algoCheck(signatureLength) {
let algo;
// If the signature sent is 44 characters,
// this means that sha256 was used:
@ -13,3 +15,5 @@ export default function algoCheck(signatureLength) {
}
return algo;
}
module.exports = algoCheck;

View File

@ -1,3 +1,5 @@
'use strict'; // eslint-disable-line strict
const headerAuthCheck = require('./headerAuthCheck');
const queryAuthCheck = require('./queryAuthCheck');

View File

@ -1,9 +1,9 @@
'use strict'; // eslint-disable-line strict
import errors from '../../errors';
const errors = require('../../errors');
const epochTime = new Date('1970-01-01').getTime();
export default function checkRequestExpiry(timestamp, log) {
function checkRequestExpiry(timestamp, log) {
// If timestamp is before epochTime, the request is invalid and return
// errors.AccessDenied
if (timestamp < epochTime) {
@ -17,7 +17,7 @@ export default function checkRequestExpiry(timestamp, log) {
log.trace('request timestamp', { requestTimestamp: timestamp });
log.trace('current timestamp', { currentTimestamp: currentTime });
const fifteenMinutes = 15 * 60 * 1000;
const fifteenMinutes = (15 * 60 * 1000);
if (currentTime - timestamp > fifteenMinutes) {
log.trace('request timestamp is not within 15 minutes of current time');
log.debug('request time too skewed', { timestamp });
@ -32,3 +32,5 @@ export default function checkRequestExpiry(timestamp, log) {
return undefined;
}
module.exports = checkRequestExpiry;

View File

@ -1,8 +1,11 @@
import utf8 from 'utf8';
import getCanonicalizedAmzHeaders from './getCanonicalizedAmzHeaders';
import getCanonicalizedResource from './getCanonicalizedResource';
'use strict'; // eslint-disable-line strict
export default function constructStringToSign(request, data, log, clientType?: any) {
const utf8 = require('utf8');
const getCanonicalizedAmzHeaders = require('./getCanonicalizedAmzHeaders');
const getCanonicalizedResource = require('./getCanonicalizedResource');
function constructStringToSign(request, data, log, clientType) {
/*
Build signature per AWS requirements:
StringToSign = HTTP-Verb + '\n' +
@ -20,11 +23,11 @@ export default function constructStringToSign(request, data, log, clientType?: a
const contentMD5 = headers['content-md5'] ?
headers['content-md5'] : query['Content-MD5'];
stringToSign += contentMD5 ? `${contentMD5}\n` : '\n';
stringToSign += (contentMD5 ? `${contentMD5}\n` : '\n');
const contentType = headers['content-type'] ?
headers['content-type'] : query['Content-Type'];
stringToSign += contentType ? `${contentType}\n` : '\n';
stringToSign += (contentType ? `${contentType}\n` : '\n');
/*
AWS docs are conflicting on whether to include x-amz-date header here
@ -39,3 +42,5 @@ export default function constructStringToSign(request, data, log, clientType?: a
+ getCanonicalizedResource(request, clientType);
return utf8.encode(stringToSign);
}
module.exports = constructStringToSign;

View File

@ -1,4 +1,6 @@
export default function getCanonicalizedAmzHeaders(headers, clientType) {
'use strict'; // eslint-disable-line strict
function getCanonicalizedAmzHeaders(headers, clientType) {
/*
Iterate through headers and pull any headers that are x-amz headers.
Need to include 'x-amz-date' here even though AWS docs
@ -39,5 +41,7 @@ export default function getCanonicalizedAmzHeaders(headers, clientType) {
// Build headerString
return amzHeaders.reduce((headerStr, current) =>
`${headerStr}${current[0]}:${current[1]}\n`,
'');
'');
}
module.exports = getCanonicalizedAmzHeaders;

View File

@ -1,4 +1,6 @@
import * as url from 'url';
'use strict'; // eslint-disable-line strict
const url = require('url');
const gcpSubresources = [
'acl',
@ -39,7 +41,7 @@ const awsSubresources = [
'website',
];
export default function getCanonicalizedResource(request, clientType) {
function getCanonicalizedResource(request, clientType) {
/*
This variable is used to determine whether to insert
a '?' or '&'. Once a query parameter is added to the resourceString,
@ -115,3 +117,5 @@ export default function getCanonicalizedResource(request, clientType) {
}
return resourceString;
}
module.exports = getCanonicalizedResource;

View File

@ -1,10 +1,12 @@
import errors from '../../errors';
import * as constants from '../../constants';
import constructStringToSign from './constructStringToSign';
import checkRequestExpiry from './checkRequestExpiry';
import algoCheck from './algoCheck';
'use strict'; // eslint-disable-line strict
export function check(request, log, data) {
const errors = require('../../errors');
const constants = require('../../constants');
const constructStringToSign = require('./constructStringToSign');
const checkRequestExpiry = require('./checkRequestExpiry');
const algoCheck = require('./algoCheck');
function check(request, log, data) {
log.trace('running header auth check');
const headers = request.headers;
@ -15,19 +17,15 @@ export function check(request, log, data) {
}
// Check to make sure timestamp is within 15 minutes of current time
let timestamp = headers['x-amz-date']
? headers['x-amz-date']
: headers.date;
let timestamp = headers['x-amz-date'] ?
headers['x-amz-date'] : headers.date;
timestamp = Date.parse(timestamp);
if (!timestamp) {
log.debug('missing or invalid date header', {
method: 'auth/v2/headerAuthCheck.check',
});
return {
err: errors.AccessDenied.customizeDescription(
'Authentication requires a valid Date or ' + 'x-amz-date header'
),
};
log.debug('missing or invalid date header',
{ method: 'auth/v2/headerAuthCheck.check' });
return { err: errors.AccessDenied.
customizeDescription('Authentication requires a valid Date or ' +
'x-amz-date header') };
}
const err = checkRequestExpiry(timestamp, log);
@ -48,10 +46,8 @@ export function check(request, log, data) {
log.debug('invalid authorization header', { authInfo });
return { err: errors.InvalidArgument };
}
const accessKey =
semicolonIndex > 4
? authInfo.substring(4, semicolonIndex).trim()
: undefined;
const accessKey = semicolonIndex > 4 ?
authInfo.substring(4, semicolonIndex).trim() : undefined;
if (typeof accessKey !== 'string' || accessKey.length === 0) {
log.trace('invalid authorization header', { authInfo });
return { err: errors.MissingSecurityHeader };
@ -84,3 +80,5 @@ export function check(request, log, data) {
},
};
}
module.exports = { check };

View File

@ -1,9 +1,11 @@
import errors from '../../errors';
import * as constants from '../../constants';
import algoCheck from './algoCheck';
import constructStringToSign from './constructStringToSign';
'use strict'; // eslint-disable-line strict
export function check(request, log, data) {
const errors = require('../../errors');
const constants = require('../../constants');
const algoCheck = require('./algoCheck');
const constructStringToSign = require('./constructStringToSign');
function check(request, log, data) {
log.trace('running query auth check');
if (request.method === 'POST') {
log.debug('query string auth not supported for post requests');
@ -26,28 +28,26 @@ export function check(request, log, data) {
*/
const expirationTime = parseInt(data.Expires, 10) * 1000;
if (Number.isNaN(expirationTime)) {
log.debug('invalid expires parameter', { expires: data.Expires });
log.debug('invalid expires parameter',
{ expires: data.Expires });
return { err: errors.MissingSecurityHeader };
}
const currentTime = Date.now();
const preSignedURLExpiry =
process.env.PRE_SIGN_URL_EXPIRY &&
!Number.isNaN(process.env.PRE_SIGN_URL_EXPIRY)
? Number.parseInt(process.env.PRE_SIGN_URL_EXPIRY, 10)
: constants.defaultPreSignedURLExpiry * 1000;
const preSignedURLExpiry = process.env.PRE_SIGN_URL_EXPIRY
&& !Number.isNaN(process.env.PRE_SIGN_URL_EXPIRY)
? Number.parseInt(process.env.PRE_SIGN_URL_EXPIRY, 10)
: constants.defaultPreSignedURLExpiry * 1000;
if (expirationTime > currentTime + preSignedURLExpiry) {
log.debug('expires parameter too far in future', {
expires: request.query.Expires,
});
log.debug('expires parameter too far in future',
{ expires: request.query.Expires });
return { err: errors.AccessDenied };
}
if (currentTime > expirationTime) {
log.debug('current time exceeds expires time', {
expires: request.query.Expires,
});
log.debug('current time exceeds expires time',
{ expires: request.query.Expires });
return { err: errors.RequestTimeTooSkewed };
}
const accessKey = data.AWSAccessKeyId;
@ -82,3 +82,5 @@ export function check(request, log, data) {
},
};
}
module.exports = { check };

View File

@ -1,3 +1,5 @@
'use strict'; // eslint-disable-line strict
/*
AWS's URI encoding rules:
URI encode every byte. Uri-Encode() must enforce the following rules:
@ -30,27 +32,23 @@ function _toHexUTF8(char) {
return res;
}
export default function awsURIencode(input, encodeSlash?: any, noEncodeStar?: any) {
function awsURIencode(input, encodeSlash, noEncodeStar) {
const encSlash = encodeSlash === undefined ? true : encodeSlash;
let encoded = '';
/**
* Duplicate query params are not suppported by AWS S3 APIs. These params
* are parsed as Arrays by Node.js HTTP parser which breaks this method
*/
*/
if (typeof input !== 'string') {
return encoded;
}
for (let i = 0; i < input.length; i++) {
let ch = input.charAt(i);
if (
(ch >= 'A' && ch <= 'Z') ||
if ((ch >= 'A' && ch <= 'Z') ||
(ch >= 'a' && ch <= 'z') ||
(ch >= '0' && ch <= '9') ||
ch === '_' ||
ch === '-' ||
ch === '~' ||
ch === '.'
) {
ch === '_' || ch === '-' ||
ch === '~' || ch === '.') {
encoded = encoded.concat(ch);
} else if (ch === ' ') {
encoded = encoded.concat('%20');
@ -78,3 +76,5 @@ export default function awsURIencode(input, encodeSlash?: any, noEncodeStar?: an
}
return encoded;
}
module.exports = awsURIencode;

View File

@ -1,22 +1,17 @@
import * as crypto from 'crypto';
import createCanonicalRequest from './createCanonicalRequest';
'use strict'; // eslint-disable-line strict
const crypto = require('crypto');
const createCanonicalRequest = require('./createCanonicalRequest');
/**
* constructStringToSign - creates V4 stringToSign
* @param {object} params - params object
* @returns {string} - stringToSign
*/
export default function constructStringToSign(params): string {
const {
request,
signedHeaders,
payloadChecksum,
credentialScope,
timestamp,
query,
log,
proxyPath,
} = params;
function constructStringToSign(params) {
const { request, signedHeaders, payloadChecksum, credentialScope, timestamp,
query, log, proxyPath } = params;
const path = proxyPath || request.path;
const canonicalReqResult = createCanonicalRequest({
@ -39,11 +34,11 @@ export default function constructStringToSign(params): string {
log.debug('constructed canonicalRequest', { canonicalReqResult });
}
const sha256 = crypto.createHash('sha256');
const canonicalHex = sha256
.update(canonicalReqResult, 'binary')
const canonicalHex = sha256.update(canonicalReqResult, 'binary')
.digest('hex');
const stringToSign =
`AWS4-HMAC-SHA256\n${timestamp}\n` +
`${credentialScope}\n${canonicalHex}`;
const stringToSign = `AWS4-HMAC-SHA256\n${timestamp}\n` +
`${credentialScope}\n${canonicalHex}`;
return stringToSign;
}
module.exports = constructStringToSign;

View File

@ -1,6 +1,8 @@
import awsURIencode from './awsURIencode';
import * as crypto from 'crypto';
import * as queryString from 'querystring';
'use strict'; // eslint-disable-line strict
const awsURIencode = require('./awsURIencode');
const crypto = require('crypto');
const queryString = require('querystring');
/**
* createCanonicalRequest - creates V4 canonical request
@ -10,7 +12,7 @@ import * as queryString from 'querystring';
* payloadChecksum (from request)
* @returns {string} - canonicalRequest
*/
export default function createCanonicalRequest(params) {
function createCanonicalRequest(params) {
const pHttpVerb = params.pHttpVerb;
const pResource = params.pResource;
const pQuery = params.pQuery;
@ -85,3 +87,5 @@ export default function createCanonicalRequest(params) {
`${signedHeaders}\n${payloadChecksum}`;
return canonicalRequest;
}
module.exports = createCanonicalRequest;

View File

@ -1,16 +1,16 @@
import errors from '../../../lib/errors';
import * as constants from '../../constants';
import constructStringToSign from './constructStringToSign';
import {
checkTimeSkew,
convertUTCtoISO8601,
convertAmzTimeToMs,
} from './timeUtils';
import {
extractAuthItems,
validateCredentials,
areSignedHeadersComplete,
} from './validateInputs';
'use strict'; // eslint-disable-line strict
const errors = require('../../../lib/errors');
const constants = require('../../constants');
const constructStringToSign = require('./constructStringToSign');
const checkTimeSkew = require('./timeUtils').checkTimeSkew;
const convertUTCtoISO8601 = require('./timeUtils').convertUTCtoISO8601;
const convertAmzTimeToMs = require('./timeUtils').convertAmzTimeToMs;
const extractAuthItems = require('./validateInputs').extractAuthItems;
const validateCredentials = require('./validateInputs').validateCredentials;
const areSignedHeadersComplete =
require('./validateInputs').areSignedHeadersComplete;
/**
* V4 header auth check
@ -21,7 +21,7 @@ import {
* @param {string} awsService - Aws service ('iam' or 's3')
* @return {callback} calls callback
*/
export function check(request, log, data, awsService) {
function check(request, log, data, awsService) {
log.trace('running header auth check');
const token = request.headers['x-amz-security-token'];
@ -51,9 +51,8 @@ export function check(request, log, data, awsService) {
if (payloadChecksum === 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD') {
log.trace('requesting streaming v4 auth');
if (request.method !== 'PUT') {
log.debug('streaming v4 auth for put only', {
method: 'auth/v4/headerAuthCheck.check',
});
log.debug('streaming v4 auth for put only',
{ method: 'auth/v4/headerAuthCheck.check' });
return { err: errors.InvalidArgument };
}
if (!request.headers['x-amz-decoded-content-length']) {
@ -78,12 +77,9 @@ export function check(request, log, data, awsService) {
if (xAmzDate) {
const xAmzDateArr = xAmzDate.split('T');
// check that x-amz- date has the correct format and after epochTime
if (
xAmzDateArr.length === 2 &&
xAmzDateArr[0].length === 8 &&
xAmzDateArr[1].length === 7 &&
Number.parseInt(xAmzDateArr[0], 10) > 19700101
) {
if (xAmzDateArr.length === 2 && xAmzDateArr[0].length === 8
&& xAmzDateArr[1].length === 7
&& Number.parseInt(xAmzDateArr[0], 10) > 19700101) {
// format of x-amz- date is ISO 8601: YYYYMMDDTHHMMSSZ
timestamp = request.headers['x-amz-date'];
}
@ -91,27 +87,18 @@ export function check(request, log, data, awsService) {
timestamp = convertUTCtoISO8601(request.headers.date);
}
if (!timestamp) {
log.debug('missing or invalid date header', {
method: 'auth/v4/headerAuthCheck.check',
});
return {
err: errors.AccessDenied.customizeDescription(
'Authentication requires a valid Date or ' + 'x-amz-date header'
),
};
log.debug('missing or invalid date header',
{ method: 'auth/v4/headerAuthCheck.check' });
return { err: errors.AccessDenied.
customizeDescription('Authentication requires a valid Date or ' +
'x-amz-date header') };
}
const validationResult = validateCredentials(
credentialsArr,
timestamp,
log
);
const validationResult = validateCredentials(credentialsArr, timestamp,
log);
if (validationResult instanceof Error) {
log.debug('credentials in improper format', {
credentialsArr,
timestamp,
validationResult,
});
log.debug('credentials in improper format', { credentialsArr,
timestamp, validationResult });
return { err: validationResult };
}
// credentialsArr is [accessKey, date, region, aws-service, aws4_request]
@ -134,7 +121,7 @@ export function check(request, log, data, awsService) {
// expiry is as set out in the policy.
// 15 minutes in seconds
const expiry = 15 * 60;
const expiry = (15 * 60);
const isTimeSkewed = checkTimeSkew(timestamp, expiry, log);
if (isTimeSkewed) {
return { err: errors.RequestTimeTooSkewed };
@ -146,11 +133,8 @@ export function check(request, log, data, awsService) {
proxyPath = decodeURIComponent(request.headers.proxy_path);
} catch (err) {
log.debug('invalid proxy_path header', { proxyPath, err });
return {
err: errors.InvalidArgument.customizeDescription(
'invalid proxy_path header'
),
};
return { err: errors.InvalidArgument.customizeDescription(
'invalid proxy_path header') };
}
}
@ -170,6 +154,7 @@ export function check(request, log, data, awsService) {
return { err: stringToSign };
}
return {
err: null,
params: {
@ -193,3 +178,5 @@ export function check(request, log, data, awsService) {
},
};
}
module.exports = { check };

View File

@ -1,10 +1,15 @@
import * as constants from '../../constants';
import errors from '../../errors';
'use strict'; // eslint-disable-line strict
import constructStringToSign from './constructStringToSign';
import { checkTimeSkew, convertAmzTimeToMs } from './timeUtils';
import { validateCredentials, extractQueryParams } from './validateInputs';
import { areSignedHeadersComplete } from './validateInputs';
const constants = require('../../constants');
const errors = require('../../errors');
const constructStringToSign = require('./constructStringToSign');
const checkTimeSkew = require('./timeUtils').checkTimeSkew;
const convertAmzTimeToMs = require('./timeUtils').convertAmzTimeToMs;
const validateCredentials = require('./validateInputs').validateCredentials;
const extractQueryParams = require('./validateInputs').extractQueryParams;
const areSignedHeadersComplete =
require('./validateInputs').areSignedHeadersComplete;
/**
* V4 query auth check
@ -13,7 +18,7 @@ import { areSignedHeadersComplete } from './validateInputs';
* @param {object} data - Contain authentification params (GET or POST data)
* @return {callback} calls callback
*/
export function check(request, log, data) {
function check(request, log, data) {
const authParams = extractQueryParams(data, log);
if (Object.keys(authParams).length !== 5) {
@ -39,13 +44,11 @@ export function check(request, log, data) {
return { err: errors.AccessDenied };
}
const validationResult = validateCredentials(credential, timestamp, log);
const validationResult = validateCredentials(credential, timestamp,
log);
if (validationResult instanceof Error) {
log.debug('credentials in improper format', {
credential,
timestamp,
validationResult,
});
log.debug('credentials in improper format', { credential,
timestamp, validationResult });
return { err: validationResult };
}
const accessKey = credential[0];
@ -65,11 +68,8 @@ export function check(request, log, data) {
proxyPath = decodeURIComponent(request.headers.proxy_path);
} catch (err) {
log.debug('invalid proxy_path header', { proxyPath });
return {
err: errors.InvalidArgument.customizeDescription(
'invalid proxy_path header'
),
};
return { err: errors.InvalidArgument.customizeDescription(
'invalid proxy_path header') };
}
}
@ -95,7 +95,8 @@ export function check(request, log, data) {
signedHeaders,
payloadChecksum,
timestamp,
credentialScope: `${scopeDate}/${region}/${service}/${requestType}`,
credentialScope:
`${scopeDate}/${region}/${service}/${requestType}`,
awsService: service,
proxyPath,
});
@ -121,3 +122,5 @@ export function check(request, log, data) {
},
};
}
module.exports = { check };

View File

@ -1,31 +1,15 @@
import { Transform } from 'stream';
import async from 'async';
import errors from '../../../errors';
import constructChunkStringToSign from './constructChunkStringToSign';
const { Transform } = require('stream');
const async = require('async');
const errors = require('../../../errors');
const constructChunkStringToSign = require('./constructChunkStringToSign');
/**
* This class is designed to handle the chunks sent in a streaming
* v4 Auth request
*/
export default class V4Transform extends Transform {
log;
cb;
accessKey;
region;
scopeDate;
timestamp;
credentialScope;
lastSignature;
currentSignature;
haveMetadata;
seekingDataSize;
currentData;
dataCursor;
currentMetadata;
lastPieceDone;
lastChunk;
vault;
class V4Transform extends Transform {
/**
* @constructor
* @param {object} streamingV4Params - info for chunk authentication
@ -44,14 +28,8 @@ export default class V4Transform extends Transform {
* @param {function} cb - callback to api
*/
constructor(streamingV4Params, vault, log, cb) {
const {
accessKey,
signatureFromRequest,
region,
scopeDate,
timestamp,
credentialScope,
} = streamingV4Params;
const { accessKey, signatureFromRequest, region, scopeDate, timestamp,
credentialScope } = streamingV4Params;
super({});
this.log = log;
this.cb = cb;
@ -101,30 +79,28 @@ export default class V4Transform extends Transform {
this.currentMetadata.push(remainingPlusStoredMetadata);
return { completeMetadata: false };
}
let fullMetadata = remainingPlusStoredMetadata.slice(0, lineBreakIndex);
let fullMetadata = remainingPlusStoredMetadata.slice(0,
lineBreakIndex);
// handle extra line break on end of data chunk
if (fullMetadata.length === 0) {
const chunkWithoutLeadingLineBreak =
remainingPlusStoredMetadata.slice(2);
const chunkWithoutLeadingLineBreak = remainingPlusStoredMetadata
.slice(2);
// find second line break
lineBreakIndex = chunkWithoutLeadingLineBreak.indexOf('\r\n');
if (lineBreakIndex < 0) {
this.currentMetadata.push(chunkWithoutLeadingLineBreak);
return { completeMetadata: false };
}
fullMetadata = chunkWithoutLeadingLineBreak.slice(
0,
lineBreakIndex
);
fullMetadata = chunkWithoutLeadingLineBreak.slice(0,
lineBreakIndex);
}
const splitMeta = fullMetadata.toString().split(';');
this.log.trace('parsed full metadata for chunk', { splitMeta });
if (splitMeta.length !== 2) {
this.log.trace(
'chunk body did not contain correct ' + 'metadata format'
);
this.log.trace('chunk body did not contain correct ' +
'metadata format');
return { err: errors.InvalidArgument };
}
let dataSize = splitMeta[0];
@ -156,9 +132,8 @@ export default class V4Transform extends Transform {
completeMetadata: true,
// start slice at lineBreak plus 2 to remove line break at end of
// metadata piece since length of '\r\n' is 2
unparsedChunk: remainingPlusStoredMetadata.slice(
lineBreakIndex + 2
),
unparsedChunk: remainingPlusStoredMetadata
.slice(lineBreakIndex + 2),
};
}
@ -171,13 +146,10 @@ export default class V4Transform extends Transform {
*/
_authenticate(dataToSend, done) {
// use prior sig to construct new string to sign
const stringToSign = constructChunkStringToSign(
this.timestamp,
this.credentialScope,
this.lastSignature,
dataToSend
);
this.log.trace('constructed chunk string to sign', { stringToSign });
const stringToSign = constructChunkStringToSign(this.timestamp,
this.credentialScope, this.lastSignature, dataToSend);
this.log.trace('constructed chunk string to sign',
{ stringToSign });
// once used prior sig to construct string to sign, reassign
// lastSignature to current signature
this.lastSignature = this.currentSignature;
@ -193,18 +165,17 @@ export default class V4Transform extends Transform {
credentialScope: this.credentialScope,
},
};
return this.vault.authenticateV4Request(vaultParams, null, (err) => {
return this.vault.authenticateV4Request(vaultParams, null, err => {
if (err) {
this.log.trace('err from vault on streaming v4 auth', {
error: err,
paramsSentToVault: vaultParams.data,
});
this.log.trace('err from vault on streaming v4 auth',
{ error: err, paramsSentToVault: vaultParams.data });
return done(err);
}
return done();
});
}
/**
* This function will parse the chunk into metadata and data,
* use the metadata to authenticate with vault and send the
@ -224,10 +195,9 @@ export default class V4Transform extends Transform {
if (this.lastPieceDone) {
const slice = chunk.slice(0, 10);
this.log.trace(
'received chunk after end.' + 'See first 10 bytes of chunk',
{ chunk: slice.toString() }
);
this.log.trace('received chunk after end.' +
'See first 10 bytes of chunk',
{ chunk: slice.toString() });
return callback();
}
let unparsedChunk = chunk;
@ -236,11 +206,10 @@ export default class V4Transform extends Transform {
// test function
() => chunkLeftToEvaluate,
// async function
(done) => {
done => {
if (!this.haveMetadata) {
this.log.trace(
'do not have metadata so calling ' + '_parseMetadata'
);
this.log.trace('do not have metadata so calling ' +
'_parseMetadata');
// need to parse our metadata
const parsedMetadataResults =
this._parseMetadata(unparsedChunk);
@ -258,7 +227,7 @@ export default class V4Transform extends Transform {
}
if (this.lastChunk) {
this.log.trace('authenticating final chunk with no data');
return this._authenticate(null, (err) => {
return this._authenticate(null, err => {
if (err) {
return done(err);
}
@ -277,18 +246,17 @@ export default class V4Transform extends Transform {
}
// parse just the next data piece without \r\n at the end
// (therefore, minus 2)
const nextDataPiece = unparsedChunk.slice(
0,
this.seekingDataSize - 2
);
const nextDataPiece =
unparsedChunk.slice(0, this.seekingDataSize - 2);
// add parsed data piece to other currentData pieces
// so that this.currentData is the full data piece
nextDataPiece.copy(this.currentData, this.dataCursor);
return this._authenticate(this.currentData, (err) => {
return this._authenticate(this.currentData, err => {
if (err) {
return done(err);
}
unparsedChunk = unparsedChunk.slice(this.seekingDataSize);
unparsedChunk =
unparsedChunk.slice(this.seekingDataSize);
this.push(this.currentData);
this.haveMetadata = false;
this.seekingDataSize = -1;
@ -299,7 +267,7 @@ export default class V4Transform extends Transform {
});
},
// final callback
(err) => {
err => {
if (err) {
return this.cb(err);
}
@ -309,3 +277,5 @@ export default class V4Transform extends Transform {
);
}
}
module.exports = V4Transform;

View File

@ -1,5 +1,6 @@
import * as crypto from 'crypto';
import * as constants from '../../../constants';
const crypto = require('crypto');
const constants = require('../../../constants');
/**
* Constructs stringToSign for chunk
@ -12,12 +13,8 @@ import * as constants from '../../../constants';
* @param {string} justDataChunk - data portion of chunk
* @returns {string} stringToSign
*/
export default function constructChunkStringToSign(
timestamp: string,
credentialScope: string,
lastSignature: string,
justDataChunk: string
): string {
function constructChunkStringToSign(timestamp,
credentialScope, lastSignature, justDataChunk) {
let currentChunkHash;
// for last chunk, there will be no data, so use emptyStringHash
if (!justDataChunk) {
@ -25,12 +22,11 @@ export default function constructChunkStringToSign(
} else {
currentChunkHash = crypto.createHash('sha256');
currentChunkHash = currentChunkHash
.update(justDataChunk, 'binary')
.digest('hex');
.update(justDataChunk, 'binary').digest('hex');
}
return (
`AWS4-HMAC-SHA256-PAYLOAD\n${timestamp}\n` +
return `AWS4-HMAC-SHA256-PAYLOAD\n${timestamp}\n` +
`${credentialScope}\n${lastSignature}\n` +
`${constants.emptyStringHash}\n${currentChunkHash}`
);
`${constants.emptyStringHash}\n${currentChunkHash}`;
}
module.exports = constructChunkStringToSign;

View File

@ -1,10 +1,12 @@
'use strict'; // eslint-disable-line strict
/**
* Convert timestamp to milliseconds since Unix Epoch
* @param {string} timestamp of ISO8601Timestamp format without
* dashes or colons, e.g. 20160202T220410Z
* @return {number} number of milliseconds since Unix Epoch
*/
export function convertAmzTimeToMs(timestamp) {
function convertAmzTimeToMs(timestamp) {
const arr = timestamp.split('');
// Convert to YYYY-MM-DDTHH:mm:ss.sssZ
const ISO8601time = `${arr.slice(0, 4).join('')}-${arr[4]}${arr[5]}` +
@ -19,7 +21,7 @@ export function convertAmzTimeToMs(timestamp) {
* @param {string} timestamp of UTC form: Fri, 10 Feb 2012 21:34:55 GMT
* @return {string} ISO8601 timestamp of form: YYYYMMDDTHHMMSSZ
*/
export function convertUTCtoISO8601(timestamp) {
function convertUTCtoISO8601(timestamp) {
// convert to ISO string: YYYY-MM-DDTHH:mm:ss.sssZ.
const converted = new Date(timestamp).toISOString();
// Remove "-"s and "."s and milliseconds
@ -34,7 +36,7 @@ export function convertUTCtoISO8601(timestamp) {
* @param {object} log - log for request
* @return {boolean} true if there is a time problem
*/
export function checkTimeSkew(timestamp, expiry, log) {
function checkTimeSkew(timestamp, expiry, log) {
const currentTime = Date.now();
const fifteenMinutes = (15 * 60 * 1000);
const parsedTimestamp = convertAmzTimeToMs(timestamp);
@ -54,3 +56,5 @@ export function checkTimeSkew(timestamp, expiry, log) {
}
return false;
}
module.exports = { convertAmzTimeToMs, convertUTCtoISO8601, checkTimeSkew };

View File

@ -1,4 +1,6 @@
import errors from '../../../lib/errors';
'use strict'; // eslint-disable-line strict
const errors = require('../../../lib/errors');
/**
* Validate Credentials
@ -9,7 +11,7 @@ import errors from '../../../lib/errors';
* @param {object} log - logging object
* @return {boolean} true if credentials are correct format, false if not
*/
export function validateCredentials(credentials, timestamp, log) {
function validateCredentials(credentials, timestamp, log) {
if (!Array.isArray(credentials) || credentials.length !== 5) {
log.warn('credentials in improper format', { credentials });
return errors.InvalidArgument;
@ -23,39 +25,32 @@ export function validateCredentials(credentials, timestamp, log) {
log.warn('accessKey provided is wrong format', { accessKey });
return errors.InvalidArgument;
}
// The scope date (format YYYYMMDD) must be same date as the timestamp
// on the request from the x-amz-date param (if queryAuthCheck)
// or from the x-amz-date header or date header (if headerAuthCheck)
// Format of timestamp is ISO 8601: YYYYMMDDTHHMMSSZ.
// http://docs.aws.amazon.com/AmazonS3/latest/API/
// sigv4-query-string-auth.html
// http://docs.aws.amazon.com/general/latest/gr/
// sigv4-date-handling.html
// The scope date (format YYYYMMDD) must be same date as the timestamp
// on the request from the x-amz-date param (if queryAuthCheck)
// or from the x-amz-date header or date header (if headerAuthCheck)
// Format of timestamp is ISO 8601: YYYYMMDDTHHMMSSZ.
// http://docs.aws.amazon.com/AmazonS3/latest/API/
// sigv4-query-string-auth.html
// http://docs.aws.amazon.com/general/latest/gr/
// sigv4-date-handling.html
// convert timestamp to format of scopeDate YYYYMMDD
// convert timestamp to format of scopeDate YYYYMMDD
const timestampDate = timestamp.split('T')[0];
if (scopeDate.length !== 8 || scopeDate !== timestampDate) {
log.warn('scope date must be the same date as the timestamp date', {
scopeDate,
timestampDate,
});
log.warn('scope date must be the same date as the timestamp date',
{ scopeDate, timestampDate });
return errors.RequestTimeTooSkewed;
}
if (
service !== 's3' &&
service !== 'iam' &&
service !== 'ring' &&
service !== 'sts'
) {
if (service !== 's3' && service !== 'iam' && service !== 'ring' &&
service !== 'sts') {
log.warn('service in credentials is not one of s3/iam/ring/sts', {
service,
});
return errors.InvalidArgument;
}
if (requestType !== 'aws4_request') {
log.warn('requestType contained in params is not aws4_request', {
requestType,
});
log.warn('requestType contained in params is not aws4_request',
{ requestType });
return errors.InvalidArgument;
}
return {};
@ -67,14 +62,13 @@ export function validateCredentials(credentials, timestamp, log) {
* @param {object} log - logging object
* @return {object} object containing extracted query params for authV4
*/
export function extractQueryParams(queryObj, log) {
function extractQueryParams(queryObj, log) {
const authParams = {};
// Do not need the algorithm sent back
if (queryObj['X-Amz-Algorithm'] !== 'AWS4-HMAC-SHA256') {
log.warn('algorithm param incorrect', {
algo: queryObj['X-Amz-Algorithm'],
});
log.warn('algorithm param incorrect',
{ algo: queryObj['X-Amz-Algorithm'] });
return authParams;
}
@ -87,6 +81,7 @@ export function extractQueryParams(queryObj, log) {
return authParams;
}
const signature = queryObj['X-Amz-Signature'];
if (signature && signature.length === 64) {
authParams.signatureFromRequest = signature;
@ -99,15 +94,14 @@ export function extractQueryParams(queryObj, log) {
if (timestamp && timestamp.length === 16) {
authParams.timestamp = timestamp;
} else {
log.warn('missing or invalid timestamp', {
timestamp: queryObj['X-Amz-Date'],
});
log.warn('missing or invalid timestamp',
{ timestamp: queryObj['X-Amz-Date'] });
return authParams;
}
const expiry = Number.parseInt(queryObj['X-Amz-Expires'], 10);
const sevenDays = 604800;
if (expiry && expiry > 0 && expiry <= sevenDays) {
if (expiry && (expiry > 0 && expiry <= sevenDays)) {
authParams.expiry = expiry;
} else {
log.warn('invalid expiry', { expiry });
@ -124,15 +118,17 @@ export function extractQueryParams(queryObj, log) {
return authParams;
}
/**
* Extract and validate components from auth header
* @param {string} authHeader - authorization header from request
* @param {object} log - logging object
* @return {object} object containing extracted auth header items for authV4
*/
export function extractAuthItems(authHeader, log) {
function extractAuthItems(authHeader, log) {
const authItems = {};
const authArray = authHeader.replace('AWS4-HMAC-SHA256 ', '').split(',');
const authArray = authHeader
.replace('AWS4-HMAC-SHA256 ', '').split(',');
if (authArray.length < 3) {
return authItems;
@ -142,34 +138,25 @@ export function extractAuthItems(authHeader, log) {
const signedHeadersStr = authArray[1];
const signatureStr = authArray[2];
log.trace('credentials from request', { credentialStr });
if (
credentialStr &&
credentialStr.trim().startsWith('Credential=') &&
credentialStr.indexOf('/') > -1
) {
if (credentialStr && credentialStr.trim().startsWith('Credential=')
&& credentialStr.indexOf('/') > -1) {
authItems.credentialsArr = credentialStr
.trim()
.replace('Credential=', '')
.split('/');
.trim().replace('Credential=', '').split('/');
} else {
log.warn('missing credentials');
}
log.trace('signed headers from request', { signedHeadersStr });
if (
signedHeadersStr &&
signedHeadersStr.trim().startsWith('SignedHeaders=')
) {
if (signedHeadersStr && signedHeadersStr.trim()
.startsWith('SignedHeaders=')) {
authItems.signedHeaders = signedHeadersStr
.trim()
.replace('SignedHeaders=', '');
.trim().replace('SignedHeaders=', '');
} else {
log.warn('missing signed headers');
}
log.trace('signature from request', { signatureStr });
if (signatureStr && signatureStr.trim().startsWith('Signature=')) {
authItems.signatureFromRequest = signatureStr
.trim()
.replace('Signature=', '');
.trim().replace('Signature=', '');
} else {
log.warn('missing signature');
}
@ -183,20 +170,21 @@ export function extractAuthItems(authHeader, log) {
* @param {object} allHeaders - request.headers
* @return {boolean} true if all x-amz-headers included and false if not
*/
export function areSignedHeadersComplete(signedHeaders, allHeaders) {
function areSignedHeadersComplete(signedHeaders, allHeaders) {
const signedHeadersList = signedHeaders.split(';');
if (signedHeadersList.indexOf('host') === -1) {
return false;
}
const headers = Object.keys(allHeaders);
for (let i = 0; i < headers.length; i++) {
if (
(headers[i].startsWith('x-amz-') ||
headers[i].startsWith('x-scal-')) &&
signedHeadersList.indexOf(headers[i]) === -1
) {
if ((headers[i].startsWith('x-amz-')
|| headers[i].startsWith('x-scal-'))
&& signedHeadersList.indexOf(headers[i]) === -1) {
return false;
}
}
return true;
}
module.exports = { validateCredentials, extractQueryParams,
areSignedHeadersComplete, extractAuthItems };

View File

@ -103,11 +103,11 @@ module.exports = {
gcpTaggingPrefix: 'aws-tag-',
productName: 'APN/1.0 Scality/1.0 Scality CloudServer for Zenko',
legacyLocations: ['sproxyd', 'legacy'],
// healthcheck default call from nginx is every 2 seconds
// healthcheck default call from nginx is every 2 seconds
// for external backends, don't call unless at least 1 minute
// (60,000 milliseconds) since last call
externalBackendHealthCheckInterval: 60000,
// some of the available data backends (if called directly rather
// some of the available data backends (if called directly rather
// than through the multiple backend gateway) need a key provided
// as a string as first parameter of the get/delete methods.
clientsRequireStringKey: { sproxyd: true, cdmi: true },

View File

@ -1,4 +1,4 @@
import { LevelDB } from 'level';
'use strict'; // eslint-disable-line strict
const writeOptions = { sync: true };
@ -18,7 +18,7 @@ const writeOptions = { sync: true };
* @param {String} message - the Error message.
* @returns {Error} the Error object.
*/
function propError(propName: string, message: string): Error {
function propError(propName, message) {
const err = new Error(message);
err[propName] = true;
return err;
@ -27,7 +27,7 @@ function propError(propName: string, message: string): Error {
/**
* Running transaction with multiple updates to be committed atomically
*/
export class IndexTransaction {
class IndexTransaction {
/**
* Builds a new transaction
*
@ -36,7 +36,7 @@ export class IndexTransaction {
*
* @returns {IndexTransaction} a new empty transaction
*/
constructor(db: LevelDB) {
constructor(db) {
this.operations = [];
this.db = db;
this.closed = false;
@ -63,17 +63,13 @@ export class IndexTransaction {
*/
push(op) {
if (this.closed) {
throw propError(
'pushOnCommittedTransaction',
'can not add ops to already committed transaction'
);
throw propError('pushOnCommittedTransaction',
'can not add ops to already committed transaction');
}
if (op.type !== 'put' && op.type !== 'del') {
throw propError(
'invalidTransactionVerb',
`unknown action type: ${op.type}`
);
throw propError('invalidTransactionVerb',
`unknown action type: ${op.type}`);
}
if (op.key === undefined) {
@ -140,22 +136,14 @@ export class IndexTransaction {
*/
addCondition(condition) {
if (this.closed) {
throw propError(
'pushOnCommittedTransaction',
'can not add conditions to already committed transaction'
);
throw propError('pushOnCommittedTransaction',
'can not add conditions to already committed transaction');
}
if (condition === undefined || Object.keys(condition).length === 0) {
throw propError(
'missingCondition',
'missing condition for conditional put'
);
throw propError('missingCondition', 'missing condition for conditional put');
}
if (typeof condition.notExists !== 'string') {
throw propError(
'unsupportedConditionalOperation',
'missing key or supported condition'
);
if (typeof (condition.notExists) !== 'string') {
throw propError('unsupportedConditionalOperation', 'missing key or supported condition');
}
this.conditions.push(condition);
}
@ -170,21 +158,13 @@ export class IndexTransaction {
*/
commit(cb) {
if (this.closed) {
return cb(
propError(
'alreadyCommitted',
'transaction was already committed'
)
);
return cb(propError('alreadyCommitted',
'transaction was already committed'));
}
if (this.operations.length === 0) {
return cb(
propError(
'emptyTransaction',
'tried to commit an empty transaction'
)
);
return cb(propError('emptyTransaction',
'tried to commit an empty transaction'));
}
this.closed = true;
@ -196,3 +176,7 @@ export class IndexTransaction {
return this.db.batch(this.operations, writeOptions, cb);
}
}
module.exports = {
IndexTransaction,
};

View File

@ -1,4 +1,4 @@
export function reshapeExceptionError(error) {
function reshapeExceptionError(error) {
const { message, code, stack, name } = error;
return {
message,
@ -7,3 +7,7 @@ export function reshapeExceptionError(error) {
name,
};
}
module.exports = {
reshapeExceptionError,
};

View File

@ -1,14 +1,11 @@
import errorsObj from '../errors/arsenalErrors.json';
'use strict'; // eslint-disable-line strict
/**
* ArsenalError
*
* @extends {Error}
*/
export class ArsenalError extends Error {
code: number
description: string
class ArsenalError extends Error {
/**
* constructor.
*
@ -16,7 +13,7 @@ export class ArsenalError extends Error {
* @param {number} code - HTTP status code
* @param {string} desc - Verbose description of error
*/
constructor(type: string, code: number, desc: string) {
constructor(type, code, desc) {
super(type);
/**
@ -68,12 +65,23 @@ export class ArsenalError extends Error {
}
}
const errors: { [key: string]: ArsenalError } = {};
Object.keys(errorsObj)
.filter((index) => index !== '_comment')
.forEach((index) => {
const { code, description } = errorsObj[index];
errors[index] = new ArsenalError(index, code, description);
});
/**
* Generate an Errors instances object.
*
* @returns {Object.<string, ArsenalError>} - object field by arsenalError
* instances
*/
function errorsGen() {
const errors = {};
const errorsObj = require('../errors/arsenalErrors.json');
export default errors;
Object.keys(errorsObj)
.filter(index => index !== '_comment')
.forEach(index => {
errors[index] = new ArsenalError(index, errorsObj[index].code,
errorsObj[index].description);
});
return errors;
}
module.exports = errorsGen();

View File

@ -17,9 +17,9 @@ describe('decyrptSecret', () => {
describe('parseServiceCredentials', () => {
const conf = {
users: [{ accessKey,
accountType: 'service-clueso',
secretKey,
userName: 'Search Service Account' }],
accountType: 'service-clueso',
secretKey,
userName: 'Search Service Account' }],
};
const auth = JSON.stringify({ privateKey });

View File

@ -1,4 +1,6 @@
export const ciphers = [
'use strict'; // eslint-disable-line strict
const ciphers = [
'DHE-RSA-AES128-GCM-SHA256',
'ECDHE-ECDSA-AES128-GCM-SHA256',
'ECDHE-RSA-AES256-GCM-SHA384',
@ -26,3 +28,7 @@ export const ciphers = [
'!EDH-RSA-DES-CBC3-SHA',
'!KRB5-DES-CBC3-SHA',
].join(':');
module.exports = {
ciphers,
};

View File

@ -29,11 +29,16 @@ c2CNfUEqyRbJF4pE9ZcdQReT5p/llmyhQdvq6cHH+cKJk63C6DHRVoStsnsUcvKe
bLxKsygK77ttjr61cxLoDJeGd5L5h1CPmwIBAg==
-----END DH PARAMETERS-----
*/
'use strict'; // eslint-disable-line strict
export const dhparam =
const dhparam =
'MIIBCAKCAQEAh99T77KGNuiY9N6xrCJ3QNv4SFADTa3CD+1VMTAdRJLHUNpglB+i' +
'AoTYiLDFZgtTCpx0ZZUD+JM3qiCZy0OK5/ZGlVD7sZmxjRtdpVK4qIPtwav8t0J7' +
'c2CNfUEqyRbJF4pE9ZcdQReT5p/llmyhQdvq6cHH+cKJk63C6DHRVoStsnsUcvKe' +
'23PLGZulKg8H3eRBxHamHkmyuEVDtoNhMIoJONsdXSpo5GgcD4EQMM8xb/qsnCxn' +
'6QIGTBvcHskxtlTZOfUPk4XQ6Yb3tQi2TurzkQHLln4U7p/GZs+D+6D3SgSPqr6P' +
'bLxKsygK77ttjr61cxLoDJeGd5L5h1CPmwIBAg==';
module.exports = {
dhparam,
};

View File

@ -1,2 +0,0 @@
export * as ciphers from './ciphers'
export * as dhparam from './dh2048'

View File

@ -1,4 +1,6 @@
import ipaddr from 'ipaddr.js';
'use strict'; // eslint-disable-line strict
const ipaddr = require('ipaddr.js');
/**
* checkIPinRangeOrMatch checks whether a given ip address is in an ip address
@ -7,7 +9,7 @@ import ipaddr from 'ipaddr.js';
* @param {object} ip - parsed ip address
* @return {boolean} true if in range, false if not
*/
export function checkIPinRangeOrMatch(cidr, ip) {
function checkIPinRangeOrMatch(cidr, ip) {
// If there is an exact match of the ip address, no need to check ranges
if (ip.toString() === cidr) {
return true;
@ -37,7 +39,7 @@ export function checkIPinRangeOrMatch(cidr, ip) {
* @param {string} ip - IPV4/IPV6/IPV4-mapped IPV6 address
* @return {object} parsedIp - Object representation of parsed IP
*/
export function parseIp(ip) {
function parseIp(ip) {
if (ipaddr.IPv4.isValid(ip)) {
return ipaddr.parse(ip);
}
@ -58,7 +60,7 @@ export function parseIp(ip) {
* @param {string} ip - IP address
* @return {boolean} - true if there is match or false for no match
*/
export function ipMatchCidrList(cidrList, ip) {
function ipMatchCidrList(cidrList, ip) {
const parsedIp = parseIp(ip);
return cidrList.some(item => {
let cidr;
@ -73,3 +75,9 @@ export function ipMatchCidrList(cidrList, ip) {
return checkIPinRangeOrMatch(cidr || item, parsedIp);
});
}
module.exports = {
checkIPinRangeOrMatch,
ipMatchCidrList,
parseIp,
};

View File

@ -1,5 +1,6 @@
import { debuglog } from 'util';
const debug = debuglog('jsutil');
'use strict'; // eslint-disable-line
const debug = require('util').debuglog('jsutil');
// JavaScript utility functions
@ -16,7 +17,7 @@ const debug = debuglog('jsutil');
* @return {function} a callable wrapper mirroring <tt>func</tt> but
* only calls <tt>func</tt> at first invocation.
*/
export function once(func) {
module.exports.once = function once(func) {
const state = { called: false, res: undefined };
return function wrapper(...args) {
if (!state.called) {
@ -24,7 +25,7 @@ export function once(func) {
state.res = func.apply(func, args);
} else {
debug('function already called:', func,
'returning cached result:', state.res);
'returning cached result:', state.res);
}
return state.res;
};

View File

@ -1,19 +1,17 @@
import Redis from 'ioredis';
export default class RedisClient {
_client: Redis
const Redis = require('ioredis');
class RedisClient {
/**
* @constructor
* @param {Object} config - config
* @param {string} config.host - Redis host
* @param {number} config.port - Redis port
* @param {string} config.password - Redis password
* @param {werelogs.Logger} logger - logger instance
*/
* @constructor
* @param {Object} config - config
* @param {string} config.host - Redis host
* @param {number} config.port - Redis port
* @param {string} config.password - Redis password
* @param {werelogs.Logger} logger - logger instance
*/
constructor(config, logger) {
this._client = new Redis(config);
this._client.on('error', (err) =>
this._client.on('error', err =>
logger.trace('error from redis', {
error: err,
method: 'RedisClient.constructor',
@ -25,18 +23,18 @@ export default class RedisClient {
}
/**
* scan a pattern and return matching keys
* @param {string} pattern - string pattern to match with all existing keys
* @param {number} [count=10] - scan count
* @param {callback} cb - callback (error, result)
* @return {undefined}
*/
scan(pattern: string, count = 10, cb) {
* scan a pattern and return matching keys
* @param {string} pattern - string pattern to match with all existing keys
* @param {number} [count=10] - scan count
* @param {callback} cb - callback (error, result)
* @return {undefined}
*/
scan(pattern, count = 10, cb) {
const params = { match: pattern, count };
const keys = [];
const stream = this._client.scanStream(params);
stream.on('data', (resultKeys) => {
stream.on('data', resultKeys => {
for (let i = 0; i < resultKeys.length; i++) {
keys.push(resultKeys[i]);
}
@ -47,18 +45,15 @@ export default class RedisClient {
}
/**
* increment value of a key by 1 and set a ttl
* @param {string} key - key holding the value
* @param {number} expiry - expiry in seconds
* @param {callback} cb - callback
* @return {undefined}
*/
incrEx(key: string, expiry: number, cb) {
* increment value of a key by 1 and set a ttl
* @param {string} key - key holding the value
* @param {number} expiry - expiry in seconds
* @param {callback} cb - callback
* @return {undefined}
*/
incrEx(key, expiry, cb) {
return this._client
.multi([
['incr', key],
['expire', key, expiry],
])
.multi([['incr', key], ['expire', key, expiry]])
.exec(cb);
}
@ -69,7 +64,7 @@ export default class RedisClient {
* @param {callback} cb - callback
* @return {undefined}
*/
incrby(key: string, amount: number, cb) {
incrby(key, amount, cb) {
return this._client.incrby(key, amount, cb);
}
@ -81,12 +76,9 @@ export default class RedisClient {
* @param {callback} cb - callback
* @return {undefined}
*/
incrbyEx(key: string, amount: number, expiry: number, cb) {
incrbyEx(key, amount, expiry, cb) {
return this._client
.multi([
['incrby', key, amount],
['expire', key, expiry],
])
.multi([['incrby', key, amount], ['expire', key, expiry]])
.exec(cb);
}
@ -97,7 +89,7 @@ export default class RedisClient {
* @param {callback} cb - callback
* @return {undefined}
*/
decrby(key: string, amount: number, cb) {
decrby(key, amount, cb) {
return this._client.decrby(key, amount, cb);
}
@ -107,7 +99,7 @@ export default class RedisClient {
* @param {callback} cb - callback
* @return {undefined}
*/
get(key: string, cb) {
get(key, cb) {
return this._client.get(key, cb);
}
@ -119,17 +111,17 @@ export default class RedisClient {
* If cb response returns 1, key exists.
* @return {undefined}
*/
exists(key: string, cb) {
exists(key, cb) {
return this._client.exists(key, cb);
}
/**
* execute a batch of commands
* @param {string[]} cmds - list of commands
* @param {callback} cb - callback
* @return {undefined}
*/
batch(cmds: string[], cb) {
* execute a batch of commands
* @param {string[]} cmds - list of commands
* @param {callback} cb - callback
* @return {undefined}
*/
batch(cmds, cb) {
return this._client.pipeline(cmds).exec(cb);
}
@ -142,7 +134,7 @@ export default class RedisClient {
* @param {callback} cb - callback
* @return {undefined}
*/
zadd(key: string, score: number, value: string, cb) {
zadd(key, score, value, cb) {
return this._client.zadd(key, score, value, cb);
}
@ -155,7 +147,7 @@ export default class RedisClient {
* @param {function} cb - callback
* @return {undefined}
*/
zcard(key: string, cb) {
zcard(key, cb) {
return this._client.zcard(key, cb);
}
@ -169,7 +161,7 @@ export default class RedisClient {
* @param {function} cb - callback
* @return {undefined}
*/
zscore(key: string, value: string, cb) {
zscore(key, value, cb) {
return this._client.zscore(key, value, cb);
}
@ -182,7 +174,7 @@ export default class RedisClient {
* The cb response returns number of values removed
* @return {undefined}
*/
zrem(key: string, value: string | any[], cb) {
zrem(key, value, cb) {
return this._client.zrem(key, value, cb);
}
@ -194,7 +186,7 @@ export default class RedisClient {
* @param {function} cb - callback
* @return {undefined}
*/
zrange(key: string, start: number, end: number, cb) {
zrange(key, start, end, cb) {
return this._client.zrange(key, start, end, cb);
}
@ -208,7 +200,7 @@ export default class RedisClient {
* @param {function} cb - callback
* @return {undefined}
*/
zrangebyscore(key: string, min: number | string, max: number | string, cb) {
zrangebyscore(key, min, max, cb) {
return this._client.zrangebyscore(key, min, max, cb);
}
@ -218,7 +210,7 @@ export default class RedisClient {
* @param {function} cb - callback
* @return {undefined}
*/
ttl(key: string, cb) {
ttl(key, cb) {
return this._client.ttl(key, cb);
}
@ -234,3 +226,5 @@ export default class RedisClient {
return this._client.client('list', cb);
}
}
module.exports = RedisClient;

View File

@ -1,18 +1,13 @@
import async from 'async';
import RedisClient from './RedisClient';
export default class StatsClient {
_redis?: RedisClient;
_interval: number;
_expiry: number;
const async = require('async');
class StatsClient {
/**
* @constructor
* @param {object} redisClient - RedisClient instance
* @param {number} interval - sampling interval by seconds
* @param {number} expiry - sampling duration by seconds
*/
constructor(redisClient: RedisClient, interval: number, expiry: number) {
constructor(redisClient, interval, expiry) {
this._redis = redisClient;
this._interval = interval;
this._expiry = expiry;
@ -29,9 +24,9 @@ export default class StatsClient {
* @param {object} d - Date instance
* @return {number} timestamp - normalized to the nearest interval
*/
_normalizeTimestamp(d: Date): number {
_normalizeTimestamp(d) {
const s = d.getSeconds();
return d.setSeconds(s - (s % this._interval), 0);
return d.setSeconds(s - s % this._interval, 0);
}
/**
@ -39,7 +34,7 @@ export default class StatsClient {
* @param {object} d - Date instance
* @return {number} timestamp - set to the previous interval
*/
_setPrevInterval(d: Date): number {
_setPrevInterval(d) {
return d.setSeconds(d.getSeconds() - this._interval);
}
@ -49,7 +44,7 @@ export default class StatsClient {
* @param {Date} date - Date instance
* @return {string} key - key for redis
*/
buildKey(name: string, date: Date): string {
buildKey(name, date) {
return `${name}:${this._normalizeTimestamp(date)}`;
}
@ -59,7 +54,7 @@ export default class StatsClient {
* @param {array} arr - Date instance
* @return {string} key - key for redis
*/
_getCount(arr: any[]): string {
_getCount(arr) {
return arr.reduce((prev, a) => {
let num = parseInt(a[1], 10);
num = Number.isNaN(num) ? 0 : num;
@ -74,7 +69,7 @@ export default class StatsClient {
* @param {function} cb - callback
* @return {undefined}
*/
reportNewRequest(id: string, incr: number, cb) {
reportNewRequest(id, incr, cb) {
if (!this._redis) {
return undefined;
}
@ -86,8 +81,8 @@ export default class StatsClient {
callback = incr;
amount = 1;
} else {
callback = cb && typeof cb === 'function' ? cb : this._noop;
amount = typeof incr === 'number' ? incr : 1;
callback = (cb && typeof cb === 'function') ? cb : this._noop;
amount = (typeof incr === 'number') ? incr : 1;
}
const key = this.buildKey(`${id}:requests`, new Date());
@ -102,7 +97,7 @@ export default class StatsClient {
* @param {function} [cb] - callback
* @return {undefined}
*/
incrementKey(key: string, incr: number, cb) {
incrementKey(key, incr, cb) {
const callback = cb || this._noop;
return this._redis.incrby(key, incr, callback);
}
@ -114,18 +109,18 @@ export default class StatsClient {
* @param {function} [cb] - callback
* @return {undefined}
*/
decrementKey(key: string, decr: number, cb) {
decrementKey(key, decr, cb) {
const callback = cb || this._noop;
return this._redis.decrby(key, decr, callback);
}
/**
* report/record a request that ended up being a 500 on the server
* @param {string} id - service identifier
* @param {callback} cb - callback
* @return {undefined}
*/
report500(id: string, cb) {
* report/record a request that ended up being a 500 on the server
* @param {string} id - service identifier
* @param {callback} cb - callback
* @return {undefined}
*/
report500(id, cb) {
if (!this._redis) {
return undefined;
}
@ -141,46 +136,41 @@ export default class StatsClient {
* @param {callback} cb - callback to call with the err/result
* @return {undefined}
*/
getAllStats(log, ids: any[], cb) {
getAllStats(log, ids, cb) {
if (!this._redis) {
return cb(null, {});
}
const statsRes = {
requests: 0,
'requests': 0,
'500s': 0,
sampleDuration: this._expiry,
'sampleDuration': this._expiry,
};
let requests = 0;
let errors = 0;
// for now set concurrency to default of 10
return async.eachLimit(
ids,
10,
(id, done) => {
this.getStats(log, id, (err, res) => {
if (err) {
return done(err);
}
requests += res.requests;
errors += res['500s'];
return done();
});
},
(error) => {
if (error) {
log.error('error getting stats', {
error,
method: 'StatsClient.getAllStats',
});
return cb(null, statsRes);
return async.eachLimit(ids, 10, (id, done) => {
this.getStats(log, id, (err, res) => {
if (err) {
return done(err);
}
statsRes.requests = requests;
statsRes['500s'] = errors;
requests += res.requests;
errors += res['500s'];
return done();
});
}, error => {
if (error) {
log.error('error getting stats', {
error,
method: 'StatsClient.getAllStats',
});
return cb(null, statsRes);
}
);
statsRes.requests = requests;
statsRes['500s'] = errors;
return cb(null, statsRes);
});
}
/**
@ -190,7 +180,7 @@ export default class StatsClient {
* @param {callback} cb - callback to call with the err/result
* @return {undefined}
*/
getStats(log, id: string, cb) {
getStats(log, id, cb) {
if (!this._redis) {
return cb(null, {});
}
@ -215,9 +205,9 @@ export default class StatsClient {
* index 1 contains the result
*/
const statsRes = {
requests: 0,
'requests': 0,
'500s': 0,
sampleDuration: this._expiry,
'sampleDuration': this._expiry,
};
if (err) {
log.error('error getting stats', {
@ -225,10 +215,10 @@ export default class StatsClient {
method: 'StatsClient.getStats',
});
/**
* Redis for stats is not a critial component, ignoring
* any error here as returning an InternalError
* would be confused with the health of the service
*/
* Redis for stats is not a critial component, ignoring
* any error here as returning an InternalError
* would be confused with the health of the service
*/
return cb(null, statsRes);
}
statsRes.requests = this._getCount(results[0]);
@ -237,3 +227,5 @@ export default class StatsClient {
});
}
}
module.exports = StatsClient;

View File

@ -1,5 +1,6 @@
import async from 'async';
import StatsClient from './StatsClient';
const async = require('async');
const StatsClient = require('./StatsClient');
/**
* @class StatsModel
@ -7,39 +8,40 @@ import StatsClient from './StatsClient';
* @classdesc Extend and overwrite how timestamps are normalized by minutes
* rather than by seconds
*/
export default class StatsModel extends StatsClient {
class StatsModel extends StatsClient {
/**
* Utility method to convert 2d array rows to columns, and vice versa
* See also: https://docs.ruby-lang.org/en/2.0.0/Array.html#method-i-zip
* @param {array} arrays - 2d array of integers
* @return {array} converted array
*/
_zip(arrays: number[][]) {
if (arrays.length > 0 && arrays.every((a) => Array.isArray(a))) {
return arrays[0].map((_, i) => arrays.map((a) => a[i]));
* Utility method to convert 2d array rows to columns, and vice versa
* See also: https://docs.ruby-lang.org/en/2.0.0/Array.html#method-i-zip
* @param {array} arrays - 2d array of integers
* @return {array} converted array
*/
_zip(arrays) {
if (arrays.length > 0 && arrays.every(a => Array.isArray(a))) {
return arrays[0].map((_, i) => arrays.map(a => a[i]));
}
return [];
}
/**
* normalize to the nearest interval
* @param {object} d - Date instance
* @return {number} timestamp - normalized to the nearest interval
*/
_normalizeTimestamp(d: Date) {
* normalize to the nearest interval
* @param {object} d - Date instance
* @return {number} timestamp - normalized to the nearest interval
*/
_normalizeTimestamp(d) {
const m = d.getMinutes();
return d.setMinutes(m - (m % Math.floor(this._interval / 60)), 0, 0);
return d.setMinutes(m - m % (Math.floor(this._interval / 60)), 0, 0);
}
/**
* override the method to get the count as an array of integers separated
* by each interval
* typical input looks like [[null, '1'], [null, '2'], [null, null]...]
* @param {array} arr - each index contains the result of each batch command
* where index 0 signifies the error and index 1 contains the result
* @return {array} array of integers, ordered from most recent interval to
* oldest interval with length of (expiry / interval)
*/
* override the method to get the count as an array of integers separated
* by each interval
* typical input looks like [[null, '1'], [null, '2'], [null, null]...]
* @param {array} arr - each index contains the result of each batch command
* where index 0 signifies the error and index 1 contains the result
* @return {array} array of integers, ordered from most recent interval to
* oldest interval with length of (expiry / interval)
*/
_getCount(arr) {
const size = Math.floor(this._expiry / this._interval);
const array = arr.reduce((store, i) => {
@ -56,23 +58,23 @@ export default class StatsModel extends StatsClient {
}
/**
* wrapper on `getStats` that handles a list of keys
* override the method to reduce the returned 2d array from `_getCount`
* @param {object} log - Werelogs request logger
* @param {array} ids - service identifiers
* @param {callback} cb - callback to call with the err/result
* @return {undefined}
*/
getAllStats(log, ids: string[], cb) {
* wrapper on `getStats` that handles a list of keys
* override the method to reduce the returned 2d array from `_getCount`
* @param {object} log - Werelogs request logger
* @param {array} ids - service identifiers
* @param {callback} cb - callback to call with the err/result
* @return {undefined}
*/
getAllStats(log, ids, cb) {
if (!this._redis) {
return cb(null, {});
}
const size = Math.floor(this._expiry / this._interval);
const statsRes = {
requests: Array(size).fill(0),
'requests': Array(size).fill(0),
'500s': Array(size).fill(0),
sampleDuration: this._expiry,
'sampleDuration': this._expiry,
};
const requests = [];
const errors = [];
@ -116,9 +118,9 @@ export default class StatsModel extends StatsClient {
* @param {function} cb - Callback
* @return {undefined}
*/
getAllGlobalStats(ids: string[], log, cb) {
const reqsKeys = ids.map((key) => ['get', key]);
return this._redis!.batch(reqsKeys, (err, res) => {
getAllGlobalStats(ids, log, cb) {
const reqsKeys = ids.map(key => (['get', key]));
return this._redis.batch(reqsKeys, (err, res) => {
const statsRes = { requests: 0 };
if (err) {
log.error('error getting metrics', {
@ -147,7 +149,7 @@ export default class StatsModel extends StatsClient {
* @param {Date} d - Date instance
* @return {number} timestamp - normalized to the nearest hour
*/
normalizeTimestampByHour(d: Date) {
normalizeTimestampByHour(d) {
return d.setMinutes(0, 0, 0);
}
@ -156,7 +158,7 @@ export default class StatsModel extends StatsClient {
* @param {Date} d - Date instance
* @return {number} timestamp - one hour prior to date passed
*/
_getDatePreviousHour(d: Date) {
_getDatePreviousHour(d) {
return d.setHours(d.getHours() - 1);
}
@ -165,8 +167,8 @@ export default class StatsModel extends StatsClient {
* @param {number} epoch - epoch time
* @return {array} array of sorted set key timestamps
*/
getSortedSetHours(epoch: number) {
const timestamps: number[] = [];
getSortedSetHours(epoch) {
const timestamps = [];
let date = this.normalizeTimestampByHour(new Date(epoch));
while (timestamps.length < 24) {
timestamps.push(date);
@ -180,7 +182,7 @@ export default class StatsModel extends StatsClient {
* @param {number} epoch - epoch time
* @return {string} normalized hour timestamp for given time
*/
getSortedSetCurrentHour(epoch: number) {
getSortedSetCurrentHour(epoch) {
return this.normalizeTimestampByHour(new Date(epoch));
}
@ -192,8 +194,8 @@ export default class StatsModel extends StatsClient {
* @param {callback} cb - callback
* @return {undefined}
*/
addToSortedSet(key: string, score: number, value: string, cb) {
this._redis!.exists(key, (err, resCode) => {
addToSortedSet(key, score, value, cb) {
this._redis.exists(key, (err, resCode) => {
if (err) {
return cb(err);
}
@ -202,7 +204,8 @@ export default class StatsModel extends StatsClient {
const msInADay = 24 * 60 * 60 * 1000;
const nearestHour = this.normalizeTimestampByHour(new Date());
// in seconds
const ttl = Math.ceil((msInADay - (Date.now() - nearestHour)) / 1000);
const ttl = Math.ceil(
(msInADay - (Date.now() - nearestHour)) / 1000);
const cmds = [
['zadd', key, score, value],
['expire', key, ttl],
@ -211,7 +214,7 @@ export default class StatsModel extends StatsClient {
if (err) {
return cb(err);
}
const cmdErr = res.find((r) => r[0] !== null);
const cmdErr = res.find(r => r[0] !== null);
if (cmdErr) {
return cb(cmdErr);
}
@ -219,7 +222,9 @@ export default class StatsModel extends StatsClient {
return cb(null, successResponse);
});
}
return this._redis!.zadd(key, score, value, cb);
return this._redis.zadd(key, score, value, cb);
});
}
}
module.exports = StatsModel;

View File

@ -1,13 +1,13 @@
import promClient from 'prom-client';
const promClient = require('prom-client');
const collectDefaultMetricsIntervalMs =
process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS !== undefined
? Number.parseInt(process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS, 10)
: 10000;
process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS !== undefined ?
Number.parseInt(process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS, 10) :
10000;
promClient.collectDefaultMetrics({ timeout: collectDefaultMetricsIntervalMs });
export default class ZenkoMetrics {
class ZenkoMetrics {
static createCounter(params) {
return new promClient.Counter(params);
}
@ -36,3 +36,5 @@ export default class ZenkoMetrics {
return promClient.register.contentType;
}
}
module.exports = ZenkoMetrics;

View File

@ -1,17 +1,11 @@
import errors from '../errors';
const errors = require('../errors');
const validServices = {
aws: ['s3', 'iam', 'sts', 'ring'],
scality: ['utapi', 'sso'],
};
export default class ARN {
_partition: string;
_service: string;
_region: string | null;
_accountId: string | null;
_resource: string;
class ARN {
/**
*
* Create an ARN object from its individual components
@ -23,7 +17,7 @@ export default class ARN {
* @param {string} [accountId] - AWS 12-digit account ID
* @param {string} resource - AWS resource path (e.g. 'foo/bar')
*/
constructor(partition: string, service: string, region: string, accountId: string, resource: string) {
constructor(partition, service, region, accountId, resource) {
this._partition = partition;
this._service = service;
this._region = region || null;
@ -31,9 +25,9 @@ export default class ARN {
this._resource = resource;
}
static createFromString(arnStr: string) {
static createFromString(arnStr) {
const [arn, partition, service, region, accountId,
resourceType, resource] = arnStr.split(':');
resourceType, resource] = arnStr.split(':');
if (arn !== 'arn') {
return { error: errors.InvalidArgument.customizeDescription(
@ -63,8 +57,8 @@ export default class ARN {
`bad ARN: bad account ID "${accountId}": ` +
'must be a 12-digit number or "*"') };
}
const fullResource = resource !== undefined ?
`${resourceType}:${resource}` : resourceType;
const fullResource = (resource !== undefined ?
`${resourceType}:${resource}` : resourceType);
return new ARN(partition, service, region, accountId, fullResource);
}
@ -85,26 +79,28 @@ export default class ARN {
}
isIAMAccount() {
return this.getService() === 'iam' &&
this.getAccountId() !== null &&
this.getAccountId() !== '*' &&
this.getResource() === 'root';
return this.getService() === 'iam'
&& this.getAccountId() !== null
&& this.getAccountId() !== '*'
&& this.getResource() === 'root';
}
isIAMUser() {
return this.getService() === 'iam' &&
this.getAccountId() !== null &&
this.getAccountId() !== '*' &&
this.getResource().startsWith('user/');
return this.getService() === 'iam'
&& this.getAccountId() !== null
&& this.getAccountId() !== '*'
&& this.getResource().startsWith('user/');
}
isIAMRole() {
return this.getService() === 'iam' &&
this.getAccountId() !== null &&
this.getResource().startsWith('role');
return this.getService() === 'iam'
&& this.getAccountId() !== null
&& this.getResource().startsWith('role');
}
toString() {
return ['arn', this.getPartition(), this.getService(),
this.getRegion(), this.getAccountId(), this.getResource()]
this.getRegion(), this.getAccountId(), this.getResource()]
.join(':');
}
}
module.exports = ARN;

View File

@ -1,13 +1,7 @@
import { legacyLocations } from '../constants';
import escapeForXml from '../s3middleware/escapeForXml';
export default class BackendInfo {
_config;
_objectLocationConstraint;
_bucketLocationConstraint;
_requestEndpoint;
_legacyLocationConstraint;
const { legacyLocations } = require('../constants');
const escapeForXml = require('../s3middleware/escapeForXml');
class BackendInfo {
/**
* Represents the info necessary to evaluate which data backend to use
* on a data put call.
@ -58,9 +52,9 @@ export default class BackendInfo {
*/
static isRequestEndpointPresent(config, requestEndpoint, log) {
if (Object.keys(config.restEndpoints).
indexOf(requestEndpoint) < 0) {
indexOf(requestEndpoint) < 0) {
log.trace('requestEndpoint does not match config restEndpoints',
{ requestEndpoint });
{ requestEndpoint });
return false;
}
return true;
@ -76,10 +70,10 @@ export default class BackendInfo {
*/
static isRequestEndpointValueValid(config, requestEndpoint, log) {
if (Object.keys(config.locationConstraints).
indexOf(config.restEndpoints[requestEndpoint]) < 0) {
indexOf(config.restEndpoints[requestEndpoint]) < 0) {
log.trace('the default locationConstraint for request' +
'Endpoint does not match any config locationConstraint',
{ requestEndpoint });
{ requestEndpoint });
return false;
}
return true;
@ -116,7 +110,7 @@ export default class BackendInfo {
*/
static isValidRequestEndpointOrBackend(config, requestEndpoint, log) {
if (!BackendInfo.isRequestEndpointPresent(config, requestEndpoint,
log)) {
log)) {
return BackendInfo.isMemOrFileBackend(config, log);
}
return BackendInfo.isRequestEndpointValueValid(config, requestEndpoint,
@ -138,7 +132,7 @@ export default class BackendInfo {
bucketLocationConstraint, requestEndpoint, log) {
if (objectLocationConstraint) {
if (BackendInfo.isValidLocationConstraint(config,
objectLocationConstraint, log)) {
objectLocationConstraint, log)) {
log.trace('objectLocationConstraint is valid');
return { isValid: true };
}
@ -149,7 +143,7 @@ export default class BackendInfo {
}
if (bucketLocationConstraint) {
if (BackendInfo.isValidLocationConstraint(config,
bucketLocationConstraint, log)) {
bucketLocationConstraint, log)) {
log.trace('bucketLocationConstraint is valid');
return { isValid: true };
}
@ -165,7 +159,7 @@ export default class BackendInfo {
return { isValid: true, legacyLocationConstraint };
}
if (!BackendInfo.isValidRequestEndpointOrBackend(config,
requestEndpoint, log)) {
requestEndpoint, log)) {
return { isValid: false, description: 'Endpoint Location Error - ' +
`Your endpoint "${requestEndpoint}" is not in restEndpoints ` +
'in your config OR the default location constraint for request ' +
@ -173,7 +167,7 @@ export default class BackendInfo {
'match any config locationConstraint - Please update.' };
}
if (BackendInfo.isRequestEndpointPresent(config, requestEndpoint,
log)) {
log)) {
return { isValid: true };
}
return { isValid: true, defaultedToDataBackend: true };
@ -242,3 +236,5 @@ export default class BackendInfo {
return this._config.backends.data;
}
}
module.exports = BackendInfo;

View File

@ -2,9 +2,7 @@
* Helper class to ease access to the Azure specific information for
* storage accounts mapped to buckets.
*/
export default class BucketAzureInfo {
_data
class BucketAzureInfo {
/**
* @constructor
* @param {object} obj - Raw structure for the Azure info on storage account
@ -235,3 +233,5 @@ export default class BucketAzureInfo {
return this._data;
}
}
module.exports = BucketAzureInfo;

View File

@ -1,44 +1,19 @@
import assert from 'assert';
import uuid from 'uuid/v4';
const assert = require('assert');
const uuid = require('uuid/v4');
import { WebsiteConfiguration } from './WebsiteConfiguration';
import ReplicationConfiguration from './ReplicationConfiguration';
import LifecycleConfiguration from './LifecycleConfiguration';
import ObjectLockConfiguration from './ObjectLockConfiguration';
import BucketPolicy from './BucketPolicy';
import NotificationConfiguration from './NotificationConfiguration';
const { WebsiteConfiguration } = require('./WebsiteConfiguration');
const ReplicationConfiguration = require('./ReplicationConfiguration');
const LifecycleConfiguration = require('./LifecycleConfiguration');
const ObjectLockConfiguration = require('./ObjectLockConfiguration');
const BucketPolicy = require('./BucketPolicy');
const NotificationConfiguration = require('./NotificationConfiguration');
// WHEN UPDATING THIS NUMBER, UPDATE BucketInfoModelVersion.md CHANGELOG
// BucketInfoModelVersion.md can be found in documentation/ at the root
// of this repository
const modelVersion = 14;
export default class BucketInfo {
_acl;
_name;
_owner;
_ownerDisplayName;
_creationDate;
_mdBucketModelVersion;
_transient;
_deleted;
_serverSideEncryption;
_versioningConfiguration;
_locationConstraint;
_readLocationConstraint;
_websiteConfiguration;
_replicationConfiguration;
_cors;
_lifecycleConfiguration;
_bucketPolicy;
_uid;
_isNFS;
_ingestion;
_azureInfo;
_objectLockEnabled;
_objectLockConfiguration;
_notificationConfiguration;
class BucketInfo {
/**
* Represents all bucket information.
* @constructor
@ -94,13 +69,13 @@ export default class BucketInfo {
* @param {object} [notificationConfiguration] - bucket notification configuration
*/
constructor(name, owner, ownerDisplayName, creationDate,
mdBucketModelVersion, acl, transient, deleted,
serverSideEncryption, versioningConfiguration,
locationConstraint, websiteConfiguration, cors,
replicationConfiguration, lifecycleConfiguration,
bucketPolicy, uid, readLocationConstraint, isNFS,
ingestionConfig, azureInfo, objectLockEnabled,
objectLockConfiguration, notificationConfiguration) {
mdBucketModelVersion, acl, transient, deleted,
serverSideEncryption, versioningConfiguration,
locationConstraint, websiteConfiguration, cors,
replicationConfiguration, lifecycleConfiguration,
bucketPolicy, uid, readLocationConstraint, isNFS,
ingestionConfig, azureInfo, objectLockEnabled,
objectLockConfiguration, notificationConfiguration) {
assert.strictEqual(typeof name, 'string');
assert.strictEqual(typeof owner, 'string');
assert.strictEqual(typeof ownerDisplayName, 'string');
@ -119,7 +94,7 @@ export default class BucketInfo {
if (serverSideEncryption) {
assert.strictEqual(typeof serverSideEncryption, 'object');
const { cryptoScheme, algorithm, masterKeyId,
configuredMasterKeyId, mandatory } = serverSideEncryption;
configuredMasterKeyId, mandatory } = serverSideEncryption;
assert.strictEqual(typeof cryptoScheme, 'number');
assert.strictEqual(typeof algorithm, 'string');
assert.strictEqual(typeof masterKeyId, 'string');
@ -793,3 +768,5 @@ export default class BucketInfo {
return this;
}
}
module.exports = BucketInfo;

View File

@ -1,7 +1,7 @@
import assert from 'assert';
const assert = require('assert');
import errors from '../errors';
import { validateResourcePolicy } from '../policy/policyValidator';
const errors = require('../errors');
const { validateResourcePolicy } = require('../policy/policyValidator');
/**
* Format of json policy:
@ -49,10 +49,7 @@ const objectActions = [
's3:PutObjectTagging',
];
export default class BucketPolicy {
_json
_policy
class BucketPolicy {
/**
* Create a Bucket Policy instance
* @param {string} json - the json policy
@ -78,11 +75,8 @@ export default class BucketPolicy {
*/
_getPolicy() {
if (!this._json || this._json === '') {
return {
error: errors.MalformedPolicy.customizeDescription(
'request json is empty or undefined'
),
};
return { error: errors.MalformedPolicy.customizeDescription(
'request json is empty or undefined') };
}
const validSchema = validateResourcePolicy(this._json);
if (validSchema.error) {
@ -110,32 +104,25 @@ export default class BucketPolicy {
* @return {error} - contains error or empty obj
*/
_validateActionResource() {
const invalid = this._policy.Statement.every((s) => {
const actions =
typeof s.Action === 'string' ? [s.Action] : s.Action;
const resources =
typeof s.Resource === 'string' ? [s.Resource] : s.Resource;
const objectAction = actions.some(
(a) => a.includes('Object') || objectActions.includes(a)
);
const invalid = this._policy.Statement.every(s => {
const actions = typeof s.Action === 'string' ?
[s.Action] : s.Action;
const resources = typeof s.Resource === 'string' ?
[s.Resource] : s.Resource;
const objectAction = actions.some(a =>
a.includes('Object') || objectActions.includes(a));
// wildcardObjectAction checks for actions such as 's3:*' or
// 's3:Put*' but will return false for actions such as
// 's3:PutBucket*'
const wildcardObjectAction = actions.some(
(a) => a.includes('*') && !a.includes('Bucket')
);
const objectResource = resources.some((r) => r.includes('/'));
return (
(objectAction && !objectResource) ||
(objectResource && !objectAction && !wildcardObjectAction)
);
a => a.includes('*') && !a.includes('Bucket'));
const objectResource = resources.some(r => r.includes('/'));
return ((objectAction && !objectResource) ||
(objectResource && !objectAction && !wildcardObjectAction));
});
if (invalid) {
return {
error: errors.MalformedPolicy.customizeDescription(
'Action does not apply to any resource(s) in statement'
),
};
return { error: errors.MalformedPolicy.customizeDescription(
'Action does not apply to any resource(s) in statement') };
}
return {};
}
@ -152,3 +139,5 @@ export default class BucketPolicy {
assert.deepStrictEqual(validated, { error: null, valid: true });
}
}
module.exports = BucketPolicy;

View File

@ -1,9 +1,9 @@
import assert from 'assert';
import UUID from 'uuid';
const assert = require('assert');
const UUID = require('uuid');
import errors from '../errors';
import LifecycleRule from './LifecycleRule';
import escapeForXml from '../s3middleware/escapeForXml';
const errors = require('../errors');
const LifecycleRule = require('./LifecycleRule');
const escapeForXml = require('../s3middleware/escapeForXml');
const MAX_DAYS = 2147483647; // Max 32-bit signed binary integer.
@ -83,7 +83,7 @@ const MAX_DAYS = 2147483647; // Max 32-bit signed binary integer.
};
*/
export default class LifecycleConfiguration {
class LifecycleConfiguration {
/**
* Create a Lifecycle Configuration instance
* @param {string} xml - the parsed xml
@ -381,7 +381,7 @@ export default class LifecycleConfiguration {
if (!tags[i].Key || !tags[i].Value) {
tagObj.error =
errors.MissingRequiredParameter.customizeDescription(
'Tag XML does not contain both Key and Value');
'Tag XML does not contain both Key and Value');
break;
}
@ -929,7 +929,7 @@ export default class LifecycleConfiguration {
const daysInt = parseInt(subExp.Days[0], 10);
if (daysInt < 1) {
expObj.error = errors.InvalidArgument.customizeDescription(
'Expiration days is not a positive integer');
'Expiration days is not a positive integer');
} else {
expObj.days = daysInt;
}
@ -1125,10 +1125,10 @@ export default class LifecycleConfiguration {
const { noncurrentDays, storageClass } = transition;
xml.push(
`<${actionName}>`,
`<NoncurrentDays>${noncurrentDays}` +
`<NoncurrentDays>${noncurrentDays}` +
'</NoncurrentDays>',
`<StorageClass>${storageClass}</StorageClass>`,
`</${actionName}>`,
`<StorageClass>${storageClass}</StorageClass>`,
`</${actionName}>`
);
});
Action = xml.join('');
@ -1146,9 +1146,9 @@ export default class LifecycleConfiguration {
}
xml.push(
`<${actionName}>`,
element,
`<StorageClass>${storageClass}</StorageClass>`,
`</${actionName}>`,
element,
`<StorageClass>${storageClass}</StorageClass>`,
`</${actionName}>`
);
});
Action = xml.join('');
@ -1220,3 +1220,5 @@ export default class LifecycleConfiguration {
return { Rules: rulesJSON };
}
}
module.exports = LifecycleConfiguration;

View File

@ -1,11 +1,11 @@
import uuid from 'uuid/v4';
const uuid = require('uuid/v4');
/**
* @class LifecycleRule
*
* @classdesc Simple get/set class to build a single Rule
*/
export default class LifecycleRule {
class LifecycleRule {
constructor(id, status) {
// defaults
this.id = id || uuid();
@ -134,3 +134,5 @@ export default class LifecycleRule {
return this;
}
}
module.exports = LifecycleRule;

View File

@ -1,11 +1,11 @@
import assert from 'assert';
import UUID from 'uuid';
const assert = require('assert');
const UUID = require('uuid');
import {
const {
supportedNotificationEvents,
notificationArnPrefix,
} from '../constants';
import errors from '../errors';
} = require('../constants');
const errors = require('../errors');
/**
* Format of xml request:
@ -27,7 +27,7 @@ import errors from '../errors';
* </NotificationConfiguration>
*/
/**
/**
* Format of config:
*
* config = {
@ -51,7 +51,7 @@ import errors from '../errors';
* }
*/
export default class NotificationConfiguration {
class NotificationConfiguration {
/**
* Create a Notification Configuration instance
* @param {string} xml - parsed configuration xml
@ -307,3 +307,5 @@ export default class NotificationConfiguration {
return;
}
}
module.exports = NotificationConfiguration;

View File

@ -1,5 +1,6 @@
import assert from 'assert';
import errors from '../errors';
const assert = require('assert');
const errors = require('../errors');
/**
* Format of xml request:
@ -16,7 +17,7 @@ import errors from '../errors';
* </ObjectLockConfiguration>
*/
/**
/**
* Format of config:
*
* config = {
@ -26,7 +27,7 @@ import errors from '../errors';
* }
* }
*/
export default class ObjectLockConfiguration {
class ObjectLockConfiguration {
/**
* Create an Object Lock Configuration instance
* @param {string} xml - the parsed configuration xml
@ -233,3 +234,5 @@ export default class ObjectLockConfiguration {
'</ObjectLockConfiguration>';
}
}
module.exports = ObjectLockConfiguration;

View File

@ -1,15 +1,16 @@
import * as crypto from 'crypto';
const crypto = require('crypto');
import * as constants from '../constants';
import * as VersionIDUtils from '../versioning/VersionID';
const constants = require('../constants');
const VersionIDUtils = require('../versioning/VersionID');
import ObjectMDLocation from './ObjectMDLocation';
const ObjectMDLocation = require('./ObjectMDLocation');
/**
* Class to manage metadata object for regular s3 objects (instead of
* mpuPart metadata for example)
*/
export default class ObjectMD {
class ObjectMD {
/**
* Create a new instance of ObjectMD. Parameter <tt>objMd</tt> is
* reserved for internal use, users should call
@ -148,7 +149,7 @@ export default class ObjectMD {
Object.assign(this._data, objMd._data);
Object.assign(this._data.replicationInfo,
objMd._data.replicationInfo);
objMd._data.replicationInfo);
}
_updateFromParsedJSON(objMd) {
@ -1189,3 +1190,5 @@ export default class ObjectMD {
return this._data;
}
}
module.exports = ObjectMD;

View File

@ -2,7 +2,7 @@
* Helper class to ease access to the Azure specific information for
* Blob and Container objects.
*/
export default class ObjectMDAzureInfo {
class ObjectMDAzureInfo {
/**
* @constructor
* @param {object} obj - Raw structure for the Azure info on Blob/Container
@ -158,3 +158,5 @@ export default class ObjectMDAzureInfo {
return this._data;
}
}
module.exports = ObjectMDAzureInfo;

View File

@ -2,7 +2,8 @@
* Helper class to ease access to a single data location in metadata
* 'location' array
*/
export default class ObjectMDLocation {
class ObjectMDLocation {
/**
* @constructor
* @param {object} locationObj - single data location info
@ -126,3 +127,5 @@ export default class ObjectMDLocation {
return this._data;
}
}
module.exports = ObjectMDLocation;

View File

@ -1,9 +1,9 @@
import assert from 'assert';
import UUID from 'uuid';
const assert = require('assert');
const UUID = require('uuid');
import escapeForXml from '../s3middleware/escapeForXml';
import errors from '../errors';
import { isValidBucketName } from '../s3routes/routesUtils';
const escapeForXml = require('../s3middleware/escapeForXml');
const errors = require('../errors');
const { isValidBucketName } = require('../s3routes/routesUtils');
const MAX_RULES = 1000;
const RULE_ID_LIMIT = 255;
@ -37,19 +37,7 @@ const validStorageClasses = [
</ReplicationConfiguration>
*/
export default class ReplicationConfiguration {
_parsedXML
_log
_config
_configPrefixes
_configIDs
_role
_destination
_rules
_prevStorageClass
_hasScalityDestination
_preferredReadLocation
class ReplicationConfiguration {
/**
* Create a ReplicationConfiguration instance
* @param {string} xml - The parsed XML
@ -481,3 +469,5 @@ export default class ReplicationConfiguration {
});
}
}
module.exports = ReplicationConfiguration;

View File

@ -1,7 +1,4 @@
export class RoutingRule {
_redirect;
_condition;
class RoutingRule {
/**
* Represents a routing rule in a website configuration.
* @constructor
@ -55,12 +52,7 @@ export class RoutingRule {
}
}
export class WebsiteConfiguration {
_indexDocument;
_errorDocument;
_redirectAllRequestsTo;
_routingRules;
class WebsiteConfiguration {
/**
* Object that represents website configuration
* @constructor
@ -196,3 +188,8 @@ export class WebsiteConfiguration {
return this._routingRules;
}
}
module.exports = {
RoutingRule,
WebsiteConfiguration,
};

View File

@ -17,7 +17,7 @@ function shuffle(array) {
}
}
export default class RoundRobin {
class RoundRobin {
/**
* @constructor
* @param {object[]|string[]} hostsList - list of hosts to query
@ -111,7 +111,7 @@ export default class RoundRobin {
pickHost() {
if (this.logger) {
this.logger.debug('pick host',
{ host: this.getCurrentHost() });
{ host: this.getCurrentHost() });
}
const curHost = this.getCurrentHost();
++this.pickCount;
@ -163,7 +163,9 @@ export default class RoundRobin {
}
if (this.logger) {
this.logger.debug('round robin host',
{ newHost: this.getCurrentHost() });
{ newHost: this.getCurrentHost() });
}
}
}
module.exports = RoundRobin;

View File

@ -1,13 +1,16 @@
import * as http from 'http';
import * as https from 'https';
import assert from 'assert';
import { dhparam } from '../../https/dh2048';
import { ciphers } from '../../https/ciphers';
import errors from '../../errors';
import { checkSupportIPv6 } from './utils';
'use strict'; // eslint-disable-line
const http = require('http');
const https = require('https');
const assert = require('assert');
const dhparam = require('../../https/dh2048').dhparam;
const ciphers = require('../../https/ciphers').ciphers;
const errors = require('../../errors');
const { checkSupportIPv6 } = require('./utils');
export default class Server {
class Server {
/**
* @constructor
*
@ -428,16 +431,16 @@ export default class Server {
// Setting no delay of the socket to the value configured
sock.setNoDelay(this.isNoDelay());
sock.on('error', err => this._logger.info(
'socket error - request rejected', { error: err }));
'socket error - request rejected', { error: err }));
});
this._server.on('tlsClientError', (err, sock) =>
this._onClientError(err, sock));
this._onClientError(err, sock));
this._server.on('clientError', (err, sock) =>
this._onClientError(err, sock));
this._onClientError(err, sock));
this._server.on('checkContinue', (req, res) =>
this._onCheckContinue(req, res));
this._onCheckContinue(req, res));
this._server.on('checkExpectation', (req, res) =>
this._onCheckExpectation(req, res));
this._onCheckExpectation(req, res));
this._server.on('listening', () => this._onListening());
}
this._server.listen(this._port, this._address);
@ -456,3 +459,5 @@ export default class Server {
return this;
}
}
module.exports = Server;

View File

@ -1,5 +1,7 @@
import * as os from 'os';
import errors from '../../errors';
'use strict'; // eslint-disable-line
const os = require('os');
const errors = require('../../errors');
/**
* Parse the Range header into an object
@ -14,7 +16,7 @@ import errors from '../../errors';
* - an error attribute of type errors.InvalidArgument if the range
* syntax is invalid
*/
export function parseRangeSpec(rangeHeader) {
function parseRangeSpec(rangeHeader) {
const rangeMatch = /^bytes=([0-9]+)?-([0-9]+)?$/.exec(rangeHeader);
if (rangeMatch) {
const rangeValues = rangeMatch.slice(1, 3);
@ -53,7 +55,7 @@ export function parseRangeSpec(rangeHeader) {
* - or an 'error' attribute of type errors.InvalidRange if the
* requested range is out of object's boundaries.
*/
export function getByteRangeFromSpec(rangeSpec, objectSize) {
function getByteRangeFromSpec(rangeSpec, objectSize) {
if (rangeSpec.suffix !== undefined) {
if (rangeSpec.suffix === 0) {
// 0-byte suffix is always invalid (even on empty objects)
@ -70,8 +72,8 @@ export function getByteRangeFromSpec(rangeSpec, objectSize) {
if (rangeSpec.start < objectSize) {
// test is false if end is undefined
return { range: [rangeSpec.start,
(rangeSpec.end < objectSize ?
rangeSpec.end : objectSize - 1)] };
(rangeSpec.end < objectSize ?
rangeSpec.end : objectSize - 1)] };
}
return { error: errors.InvalidRange };
}
@ -93,7 +95,7 @@ export function getByteRangeFromSpec(rangeSpec, objectSize) {
* - or an 'error' attribute instead of type errors.InvalidRange if
* the requested range is out of object's boundaries.
*/
export function parseRange(rangeHeader, objectSize) {
function parseRange(rangeHeader, objectSize) {
const rangeSpec = parseRangeSpec(rangeHeader);
if (rangeSpec.error) {
// invalid range syntax is silently ignored in HTTP spec,
@ -103,8 +105,15 @@ export function parseRange(rangeHeader, objectSize) {
return getByteRangeFromSpec(rangeSpec, objectSize);
}
export function checkSupportIPv6() {
function checkSupportIPv6() {
const niList = os.networkInterfaces();
return Object.keys(niList).some(network =>
niList[network].some(intfc => intfc.family === 'IPv6'));
}
module.exports = {
parseRangeSpec,
getByteRangeFromSpec,
parseRange,
checkSupportIPv6,
};

View File

@ -1,8 +1,12 @@
import async from 'async';
import errors from '../../errors';
import TTLVCodec from './codec/ttlv';
import TlsTransport from './transport/tls';
import KMIP from '.';
'use strict'; // eslint-disable-line
/* eslint new-cap: "off" */
const async = require('async');
const errors = require('../../errors');
const TTLVCodec = require('./codec/ttlv.js');
const TlsTransport = require('./transport/tls.js');
const KMIP = require('.');
const CRYPTOGRAPHIC_OBJECT_TYPE = 'Symmetric Key';
const CRYPTOGRAPHIC_ALGORITHM = 'AES';
@ -51,12 +55,7 @@ function _arsenalError(err) {
if (typeof err === 'string') {
return errors.InternalError
.customizeDescription(`${messagePrefix} ${err}`);
} else if (
err instanceof Error ||
// INFO: The second part is here only for Jest, to remove when we'll be
// fully migrated to TS
(err && typeof err.message === 'string')
) {
} else if (err instanceof Error) {
return errors.InternalError
.customizeDescription(`${messagePrefix} ${err.message}`);
}
@ -91,8 +90,8 @@ function _negotiateProtocolVersion(client, logger, cb) {
if (err) {
const error = _arsenalError(err);
logger.error('KMIP::negotiateProtocolVersion',
{ error,
vendorIdentification: client.vendorIdentification });
{ error,
vendorIdentification: client.vendorIdentification });
return cb(error);
}
const majorVersions =
@ -103,8 +102,8 @@ function _negotiateProtocolVersion(client, logger, cb) {
majorVersions.length !== minorVersions.length) {
const error = _arsenalError('No suitable protocol version');
logger.error('KMIP::negotiateProtocolVersion',
{ error,
vendorIdentification: client.vendorIdentification });
{ error,
vendorIdentification: client.vendorIdentification });
return cb(error);
}
client.kmip.changeProtocolVersion(majorVersions[0], minorVersions[0]);
@ -127,8 +126,8 @@ function _mapExtensions(client, logger, cb) {
if (err) {
const error = _arsenalError(err);
logger.error('KMIP::mapExtensions',
{ error,
vendorIdentification: client.vendorIdentification });
{ error,
vendorIdentification: client.vendorIdentification });
return cb(error);
}
const extensionNames = response.lookup(searchFilter.extensionName);
@ -136,8 +135,8 @@ function _mapExtensions(client, logger, cb) {
if (extensionNames.length !== extensionTags.length) {
const error = _arsenalError('Inconsistent extension list');
logger.error('KMIP::mapExtensions',
{ error,
vendorIdentification: client.vendorIdentification });
{ error,
vendorIdentification: client.vendorIdentification });
return cb(error);
}
extensionNames.forEach((extensionName, idx) => {
@ -161,7 +160,7 @@ function _queryServerInformation(client, logger, cb) {
if (err) {
const error = _arsenalError(err);
logger.warn('KMIP::queryServerInformation',
{ error });
{ error });
/* no error returned, caller can keep going */
return cb();
}
@ -171,9 +170,9 @@ function _queryServerInformation(client, logger, cb) {
JSON.stringify(response.lookup(searchFilter.serverInformation)[0]));
logger.info('KMIP Server identified',
{ vendorIdentification: client.vendorIdentification,
serverInformation: client.serverInformation,
negotiatedProtocolVersion: client.kmip.protocolVersion });
{ vendorIdentification: client.vendorIdentification,
serverInformation: client.serverInformation,
negotiatedProtocolVersion: client.kmip.protocolVersion });
return cb();
});
}
@ -197,8 +196,8 @@ function _queryOperationsAndObjects(client, logger, cb) {
if (err) {
const error = _arsenalError(err);
logger.error('KMIP::queryOperationsAndObjects',
{ error,
vendorIdentification: client.vendorIdentification });
{ error,
vendorIdentification: client.vendorIdentification });
return cb(error);
}
const supportedOperations = response.lookup(searchFilter.operation);
@ -223,21 +222,22 @@ function _queryOperationsAndObjects(client, logger, cb) {
logger.warn('KMIP::queryOperationsAndObjects: ' +
'The KMIP Server announces that it ' +
'does not support all of the required features',
{ vendorIdentification: client.vendorIdentification,
serverInformation: client.serverInformation,
supportsEncrypt, supportsDecrypt,
supportsActivate, supportsRevoke,
supportsCreate, supportsDestroy,
supportsQuery, supportsSymmetricKeys });
{ vendorIdentification: client.vendorIdentification,
serverInformation: client.serverInformation,
supportsEncrypt, supportsDecrypt,
supportsActivate, supportsRevoke,
supportsCreate, supportsDestroy,
supportsQuery, supportsSymmetricKeys });
} else {
logger.info('KMIP Server provides the necessary feature set',
{ vendorIdentification: client.vendorIdentification });
{ vendorIdentification: client.vendorIdentification });
}
return cb();
});
}
export default class Client {
class Client {
/**
* Construct a high level KMIP driver suitable for cloudserver
* @param {Object} options - Instance options
@ -264,8 +264,8 @@ export default class Client {
this.vendorIdentification = '';
this.serverInformation = [];
this.kmip = new KMIP(CodecClass || TTLVCodec,
TransportClass || TlsTransport,
options);
TransportClass || TlsTransport,
options);
this.kmip.registerHandshakeFunction((logger, cb) => {
this._kmipHandshake(logger, cb);
});
@ -322,8 +322,8 @@ export default class Client {
if (err) {
const error = _arsenalError(err);
logger.error('KMIP::_activateBucketKey',
{ error,
serverInformation: this.serverInformation });
{ error,
serverInformation: this.serverInformation });
return cb(error);
}
const uniqueIdentifier =
@ -332,7 +332,7 @@ export default class Client {
const error = _arsenalError(
'Server did not return the expected identifier');
logger.error('KMIP::cipherDataKey',
{ error, uniqueIdentifier });
{ error, uniqueIdentifier });
return cb(error);
}
return cb(null, keyIdentifier);
@ -351,20 +351,20 @@ export default class Client {
const attributes = [];
if (!!this.options.bucketNameAttributeName) {
attributes.push(KMIP.Attribute('TextString',
this.options.bucketNameAttributeName,
bucketName));
this.options.bucketNameAttributeName,
bucketName));
}
attributes.push(...[
KMIP.Attribute('Enumeration', 'Cryptographic Algorithm',
CRYPTOGRAPHIC_ALGORITHM),
CRYPTOGRAPHIC_ALGORITHM),
KMIP.Attribute('Integer', 'Cryptographic Length',
CRYPTOGRAPHIC_LENGTH),
CRYPTOGRAPHIC_LENGTH),
KMIP.Attribute('Integer', 'Cryptographic Usage Mask',
this.kmip.encodeMask('Cryptographic Usage Mask',
CRYPTOGRAPHIC_USAGE_MASK))]);
this.kmip.encodeMask('Cryptographic Usage Mask',
CRYPTOGRAPHIC_USAGE_MASK))]);
if (this.options.compoundCreateActivate) {
attributes.push(KMIP.Attribute('Date-Time', 'Activation Date',
new Date(Date.UTC())));
new Date(Date.UTC())));
}
return this.kmip.request(logger, 'Create', [
@ -374,8 +374,8 @@ export default class Client {
if (err) {
const error = _arsenalError(err);
logger.error('KMIP::createBucketKey',
{ error,
serverInformation: this.serverInformation });
{ error,
serverInformation: this.serverInformation });
return cb(error);
}
const createdObjectType =
@ -386,7 +386,7 @@ export default class Client {
const error = _arsenalError(
'Server created an object of wrong type');
logger.error('KMIP::createBucketKey',
{ error, createdObjectType });
{ error, createdObjectType });
return cb(error);
}
if (!this.options.compoundCreateActivate) {
@ -411,16 +411,16 @@ export default class Client {
KMIP.TextString('Unique Identifier', bucketKeyId),
KMIP.Structure('Revocation Reason', [
KMIP.Enumeration('Revocation Reason Code',
'Cessation of Operation'),
'Cessation of Operation'),
KMIP.TextString('Revocation Message',
'About to be deleted'),
'About to be deleted'),
]),
], (err, response) => {
if (err) {
const error = _arsenalError(err);
logger.error('KMIP::_revokeBucketKey',
{ error,
serverInformation: this.serverInformation });
{ error,
serverInformation: this.serverInformation });
return cb(error);
}
const uniqueIdentifier =
@ -429,7 +429,7 @@ export default class Client {
const error = _arsenalError(
'Server did not return the expected identifier');
logger.error('KMIP::_revokeBucketKey',
{ error, uniqueIdentifier });
{ error, uniqueIdentifier });
return cb(error);
}
return cb();
@ -448,8 +448,8 @@ export default class Client {
if (err) {
const error = _arsenalError(err);
logger.error('KMIP::destroyBucketKey: revocation failed',
{ error,
serverInformation: this.serverInformation });
{ error,
serverInformation: this.serverInformation });
return cb(error);
}
return this.kmip.request(logger, 'Destroy', [
@ -458,8 +458,8 @@ export default class Client {
if (err) {
const error = _arsenalError(err);
logger.error('KMIP::destroyBucketKey',
{ error,
serverInformation: this.serverInformation });
{ error,
serverInformation: this.serverInformation });
return cb(error);
}
const uniqueIdentifier =
@ -468,7 +468,7 @@ export default class Client {
const error = _arsenalError(
'Server did not return the expected identifier');
logger.error('KMIP::destroyBucketKey',
{ error, uniqueIdentifier });
{ error, uniqueIdentifier });
return cb(error);
}
return cb();
@ -487,19 +487,19 @@ export default class Client {
* @callback called with (err, cipheredDataKey: Buffer)
*/
cipherDataKey(cryptoScheme,
masterKeyId,
plainTextDataKey,
logger,
cb) {
masterKeyId,
plainTextDataKey,
logger,
cb) {
return this.kmip.request(logger, 'Encrypt', [
KMIP.TextString('Unique Identifier', masterKeyId),
KMIP.Structure('Cryptographic Parameters', [
KMIP.Enumeration('Block Cipher Mode',
CRYPTOGRAPHIC_CIPHER_MODE),
CRYPTOGRAPHIC_CIPHER_MODE),
KMIP.Enumeration('Padding Method',
CRYPTOGRAPHIC_PADDING_METHOD),
CRYPTOGRAPHIC_PADDING_METHOD),
KMIP.Enumeration('Cryptographic Algorithm',
CRYPTOGRAPHIC_ALGORITHM),
CRYPTOGRAPHIC_ALGORITHM),
]),
KMIP.ByteString('Data', plainTextDataKey),
KMIP.ByteString('IV/Counter/Nonce', CRYPTOGRAPHIC_DEFAULT_IV),
@ -507,8 +507,8 @@ export default class Client {
if (err) {
const error = _arsenalError(err);
logger.error('KMIP::cipherDataKey',
{ error,
serverInformation: this.serverInformation });
{ error,
serverInformation: this.serverInformation });
return cb(error);
}
const uniqueIdentifier =
@ -518,7 +518,7 @@ export default class Client {
const error = _arsenalError(
'Server did not return the expected identifier');
logger.error('KMIP::cipherDataKey',
{ error, uniqueIdentifier });
{ error, uniqueIdentifier });
return cb(error);
}
return cb(null, data);
@ -536,19 +536,19 @@ export default class Client {
* @callback called with (err, plainTextDataKey: Buffer)
*/
decipherDataKey(cryptoScheme,
masterKeyId,
cipheredDataKey,
logger,
cb) {
masterKeyId,
cipheredDataKey,
logger,
cb) {
return this.kmip.request(logger, 'Decrypt', [
KMIP.TextString('Unique Identifier', masterKeyId),
KMIP.Structure('Cryptographic Parameters', [
KMIP.Enumeration('Block Cipher Mode',
CRYPTOGRAPHIC_CIPHER_MODE),
CRYPTOGRAPHIC_CIPHER_MODE),
KMIP.Enumeration('Padding Method',
CRYPTOGRAPHIC_PADDING_METHOD),
CRYPTOGRAPHIC_PADDING_METHOD),
KMIP.Enumeration('Cryptographic Algorithm',
CRYPTOGRAPHIC_ALGORITHM),
CRYPTOGRAPHIC_ALGORITHM),
]),
KMIP.ByteString('Data', cipheredDataKey),
KMIP.ByteString('IV/Counter/Nonce', CRYPTOGRAPHIC_DEFAULT_IV),
@ -556,8 +556,8 @@ export default class Client {
if (err) {
const error = _arsenalError(err);
logger.error('KMIP::decipherDataKey',
{ error,
serverInformation: this.serverInformation });
{ error,
serverInformation: this.serverInformation });
return cb(error);
}
const uniqueIdentifier =
@ -567,7 +567,7 @@ export default class Client {
const error = _arsenalError(
'Server did not return the right identifier');
logger.error('KMIP::decipherDataKey',
{ error, uniqueIdentifier });
{ error, uniqueIdentifier });
return cb(error);
}
return cb(null, data);
@ -599,3 +599,5 @@ export default class Client {
});
}
}
module.exports = Client;

View File

@ -1,4 +1,6 @@
import assert from 'assert';
'use strict'; // eslint-disable-line
const assert = require('assert');
function _lookup(decodedTTLV, path) {
@ -29,7 +31,7 @@ function _lookup(decodedTTLV, path) {
return res;
}
export default class Message {
class Message {
/**
* Construct a new abstract Message
* @param {Object} content - the content of the message
@ -48,3 +50,5 @@ export default class Message {
return _lookup(this.content, path);
}
}
module.exports = Message;

View File

@ -1,5 +1,8 @@
import KMIPTags from '../tags.json';
import KMIPMessage from '../Message';
'use strict'; // eslint-disable-line
/* eslint dot-notation: "off" */
const KMIPTags = require('../tags.json');
const KMIPMessage = require('../Message.js');
const UINT32_MAX = Math.pow(2, 32);
@ -23,7 +26,7 @@ function _throwError(logger, msg, data) {
throw Error(msg);
}
export default function TTLVCodec() {
function TTLVCodec() {
if (!new.target) {
return new TTLVCodec();
}
@ -52,15 +55,15 @@ export default function TTLVCodec() {
const property = {};
if (!TypeDecoder[elementType]) {
_throwError(logger,
'Unknown element type',
{ funcName, elementTag, elementType });
'Unknown element type',
{ funcName, elementTag, elementType });
}
const elementValue = value.slice(i + 8,
i + 8 + elementLength);
i + 8 + elementLength);
if (elementValue.length !== elementLength) {
_throwError(logger, 'BUG: Wrong buffer size',
{ funcName, elementLength,
bufferLength: elementValue.length });
{ funcName, elementLength,
bufferLength: elementValue.length });
}
property.type = TypeDecoder[elementType].name;
property.value = TypeDecoder[elementType]
@ -72,7 +75,7 @@ export default function TTLVCodec() {
const tagInfo = TagDecoder[elementTag];
if (!tagInfo) {
logger.debug('Unknown element tag',
{ funcName, elementTag });
{ funcName, elementTag });
property.tag = elementTag;
element['Unknown Tag'] = property;
} else {
@ -80,8 +83,8 @@ export default function TTLVCodec() {
if (tagInfo.name === 'Attribute Name') {
if (property.type !== 'TextString') {
_throwError(logger,
'Invalide type',
{ funcName, type: property.type });
'Invalide type',
{ funcName, type: property.type });
}
diversion = property.value;
}
@ -111,8 +114,8 @@ export default function TTLVCodec() {
}
const itemResult =
TypeEncoder[itemType].encode(itemTagName,
itemValue,
itemDiversion);
itemValue,
itemDiversion);
encodedValue = encodedValue
.concat(_ttlvPadVector(itemResult));
});
@ -130,9 +133,9 @@ export default function TTLVCodec() {
const fixedLength = 4;
if (fixedLength !== value.length) {
_throwError(logger,
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
}
return value.readUInt32BE(0);
},
@ -153,16 +156,16 @@ export default function TTLVCodec() {
const fixedLength = 8;
if (fixedLength !== value.length) {
_throwError(logger,
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
}
const longUInt = UINT32_MAX * value.readUInt32BE(0) +
value.readUInt32BE(4);
if (longUInt > Number.MAX_SAFE_INTEGER) {
_throwError(logger,
'53-bit overflow',
{ funcName, longUInt });
'53-bit overflow',
{ funcName, longUInt });
}
return longUInt;
},
@ -197,9 +200,9 @@ export default function TTLVCodec() {
const fixedLength = 4;
if (fixedLength !== value.length) {
_throwError(logger,
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
}
const enumValue = value.toString('hex');
const actualTag = diversion ? TagEncoder[diversion].value : tag;
@ -208,10 +211,10 @@ export default function TTLVCodec() {
!enumInfo.enumeration ||
!enumInfo.enumeration[enumValue]) {
return { tag,
value: enumValue,
message: 'Unknown enumeration value',
diversion,
};
value: enumValue,
message: 'Unknown enumeration value',
diversion,
};
}
return enumInfo.enumeration[enumValue];
},
@ -224,7 +227,7 @@ export default function TTLVCodec() {
const actualTag = diversion || tagName;
const encodedValue =
Buffer.from(TagEncoder[actualTag].enumeration[value],
'hex');
'hex');
return _ttlvPadVector([tag, type, length, encodedValue]);
},
},
@ -235,9 +238,9 @@ export default function TTLVCodec() {
const fixedLength = 8;
if (fixedLength !== value.length) {
_throwError(logger,
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
}
const msUInt = value.readUInt32BE(0);
const lsUInt = value.readUInt32BE(4);
@ -264,7 +267,7 @@ export default function TTLVCodec() {
const length = Buffer.alloc(4);
length.writeUInt32BE(value.length);
return _ttlvPadVector([tag, type, length,
Buffer.from(value, 'utf8')]);
Buffer.from(value, 'utf8')]);
},
},
'08': {
@ -286,17 +289,17 @@ export default function TTLVCodec() {
const fixedLength = 8;
if (fixedLength !== value.length) {
_throwError(logger,
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
}
const d = new Date(0);
const utcSeconds = UINT32_MAX * value.readUInt32BE(0) +
value.readUInt32BE(4);
if (utcSeconds > Number.MAX_SAFE_INTEGER) {
_throwError(logger,
'53-bit overflow',
{ funcName, utcSeconds });
'53-bit overflow',
{ funcName, utcSeconds });
}
d.setUTCSeconds(utcSeconds);
return d;
@ -320,9 +323,9 @@ export default function TTLVCodec() {
const fixedLength = 4;
if (fixedLength !== value.length) {
_throwError(logger,
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
}
return value.readInt32BE(0);
},
@ -412,8 +415,8 @@ export default function TTLVCodec() {
throw Error(`Unknown Type '${type}'`);
}
const itemValue = TypeEncoder[type].encode(key,
item[key].value,
item[key].diversion);
item[key].value,
item[key].diversion);
result = result.concat(_ttlvPadVector(itemValue));
});
});
@ -427,3 +430,5 @@ export default function TTLVCodec() {
};
return this;
}
module.exports = TTLVCodec;

View File

@ -1,6 +1,9 @@
import uuidv4 from 'uuid/v4';
'use strict'; // eslint-disable-line
/* eslint new-cap: "off" */
import Message from './Message';
const uuidv4 = require('uuid/v4');
const Message = require('./Message.js');
/* This client requires at least a KMIP 1.2 compatible server */
const DEFAULT_PROTOCOL_VERSION_MAJOR = 1;
@ -19,7 +22,7 @@ function _PrimitiveType(tagName, type, value) {
return { [tagName]: { type, value } };
}
export default class KMIP {
class KMIP {
/**
* Construct a new KMIP Object
* @param {Class} Codec -
@ -272,11 +275,11 @@ export default class KMIP {
KMIP.Structure('Request Header', [
KMIP.Structure('Protocol Version', [
KMIP.Integer('Protocol Version Major',
this.protocolVersion.major),
this.protocolVersion.major),
KMIP.Integer('Protocol Version Minor',
this.protocolVersion.minor)]),
this.protocolVersion.minor)]),
KMIP.Integer('Maximum Response Size',
this.maximumResponseSize),
this.maximumResponseSize),
KMIP.Integer('Batch Count', 1)]),
KMIP.Structure('Batch Item', [
KMIP.Enumeration('Operation', operation),
@ -289,7 +292,7 @@ export default class KMIP {
(err, conversation, rawResponse) => {
if (err) {
logger.error('KMIP::request: Failed to send message',
{ error: err });
{ error: err });
return cb(err);
}
const response = this._decodeMessage(logger, rawResponse);
@ -308,16 +311,16 @@ export default class KMIP {
this.transport.abortPipeline(conversation);
const error = Error('Invalid batch item ID returned');
logger.error('KMIP::request: failed',
{ resultUniqueBatchItemID, uuid, error });
{ resultUniqueBatchItemID, uuid, error });
return cb(error);
}
if (performedOperation !== operation) {
this.transport.abortPipeline(conversation);
const error = Error('Operation mismatch',
{ got: performedOperation,
expected: operation });
{ got: performedOperation,
expected: operation });
logger.error('KMIP::request: Operation mismatch',
{ error });
{ error });
return cb(error);
}
if (resultStatus !== 'Success') {
@ -328,15 +331,20 @@ export default class KMIP {
response.lookup(
'Response Message/Batch Item/Result Message')[0];
const error = Error('KMIP request failure',
{ resultStatus,
resultReason,
resultMessage });
{ resultStatus,
resultReason,
resultMessage });
logger.error('KMIP::request: request failed',
{ error, resultStatus,
resultReason, resultMessage });
{ error, resultStatus,
resultReason, resultMessage });
return cb(error);
}
return cb(null, response);
});
}
}
module.exports = KMIP;

View File

@ -1,9 +1,11 @@
import assert from 'assert';
'use strict'; // eslint-disable-line
const assert = require('assert');
const DEFAULT_PIPELINE_DEPTH = 8;
const DEFAULT_KMIP_PORT = 5696;
export default class TransportTemplate {
class TransportTemplate {
/**
* Construct a new object of the TransportTemplate class
* @param {Object} channel - Typically the tls object
@ -84,8 +86,8 @@ export default class TransportTemplate {
const deferedRequest = this.deferedRequests.shift();
process.nextTick(() => {
this.send(logger,
deferedRequest.encodedMessage,
deferedRequest.cb);
deferedRequest.encodedMessage,
deferedRequest.cb);
});
} else if (this.callbackPipeline.length === 0 &&
this.deferedRequests.length === 0 &&
@ -168,3 +170,5 @@ export default class TransportTemplate {
conversation.end();
}
}
module.exports = TransportTemplate;

View File

@ -1,3 +1,5 @@
'use strict'; // eslint-disable-line
const tls = require('tls');
const TransportTemplate = require('./TransportTemplate.js');
@ -7,4 +9,4 @@ class TlsTransport extends TransportTemplate {
}
}
module.exports = TlsTra
module.exports = TlsTransport;

View File

@ -1,14 +1,14 @@
import httpServer from '../http/server';
import werelogs from 'werelogs';
import errors from '../../errors';
import ZenkoMetrics from '../../metrics/ZenkoMetrics';
import { sendSuccess, sendError } from './Utils';
const httpServer = require('../http/server');
const werelogs = require('werelogs');
const errors = require('../../errors');
const ZenkoMetrics = require('../../metrics/ZenkoMetrics');
const { sendSuccess, sendError } = require('./Utils');
function checkStub(log) { // eslint-disable-line
return true;
}
export default class HealthProbeServer extends httpServer {
class HealthProbeServer extends httpServer {
constructor(params) {
const logging = new werelogs.Logger('HealthProbeServer');
super(params.port, logging);
@ -72,3 +72,5 @@ export default class HealthProbeServer extends httpServer {
res.end(ZenkoMetrics.asPrometheus());
}
}
module.exports = HealthProbeServer;

View File

@ -1,10 +1,10 @@
import httpServer from '../http/server';
import werelogs from 'werelogs';
import errors from '../../errors';
const httpServer = require('../http/server');
const werelogs = require('werelogs');
const errors = require('../../errors');
export const DEFAULT_LIVE_ROUTE = '/_/live';
export const DEFAULT_READY_ROUTE = '/_/ready';
export const DEFAULT_METRICS_ROUTE = '/metrics';
const DEFAULT_LIVE_ROUTE = '/_/live';
const DEFAULT_READY_ROUTE = '/_/ready';
const DEFAULT_METRICS_ROUTE = '/metrics';
/**
* ProbeDelegate is used to handle probe checks.
@ -28,7 +28,7 @@ export const DEFAULT_METRICS_ROUTE = '/metrics';
*
* @extends {httpServer}
*/
export class ProbeServer extends httpServer {
class ProbeServer extends httpServer {
/**
* Create a new ProbeServer with parameters
*
@ -92,3 +92,10 @@ export class ProbeServer extends httpServer {
this._handlers.get(req.url)(res, log);
}
}
module.exports = {
ProbeServer,
DEFAULT_LIVE_ROUTE,
DEFAULT_READY_ROUTE,
DEFAULT_METRICS_ROUTE,
};

View File

@ -5,7 +5,7 @@
* @param {string} [message] - Message to send as response, defaults to OK
* @returns {undefined}
*/
export function sendSuccess(res, log, message = 'OK') {
function sendSuccess(res, log, message = 'OK') {
log.debug('replying with success');
res.writeHead(200);
res.end(message);
@ -19,14 +19,14 @@ export function sendSuccess(res, log, message = 'OK') {
* @param {string} [optMessage] - Message to use instead of the errors message
* @returns {undefined}
*/
export function sendError(res, log, error, optMessage) {
function sendError(res, log, error, optMessage) {
const message = optMessage || error.description || '';
log.debug('sending back error response',
{
httpCode: error.code,
errorType: error.message,
error: message,
},
}
);
res.writeHead(error.code);
res.end(JSON.stringify({
@ -34,3 +34,8 @@ export function sendError(res, log, error, optMessage) {
errorMessage: message,
}));
}
module.exports = {
sendSuccess,
sendError,
};

View File

@ -1,10 +1,12 @@
import assert from 'assert';
import http from 'http';
import werelogs from 'werelogs';
'use strict'; // eslint-disable-line
import * as constants from '../../constants';
import * as utils from './utils';
import errors from '../../errors';
const assert = require('assert');
const http = require('http');
const werelogs = require('werelogs');
const constants = require('../../constants');
const utils = require('./utils');
const errors = require('../../errors');
const HttpAgent = require('agentkeepalive');
@ -62,7 +64,7 @@ function makeErrorFromHTTPResponse(response) {
*
* The API is usable when the object is constructed.
*/
export default class RESTClient {
class RESTClient {
/**
* Interface to the data file server
* @constructor
@ -309,3 +311,5 @@ export default class RESTClient {
* @callback RESTClient~deleteCallback
* @param {Error} - The encountered error
*/
module.exports = RESTClient;

View File

@ -1,13 +1,15 @@
import assert from 'assert';
import url from 'url';
'use strict'; // eslint-disable-line
import werelogs from 'werelogs';
const assert = require('assert');
const url = require('url');
import httpServer from '../http/server';
import * as constants from '../../constants';
import { parseURL } from './utils';
import * as httpUtils from '../http/utils';
import errors from '../../errors';
const werelogs = require('werelogs');
const httpServer = require('../http/server');
const constants = require('../../constants');
const { parseURL } = require('./utils');
const httpUtils = require('../http/utils');
const errors = require('../../errors');
function setContentLength(response, contentLength) {
response.setHeader('Content-Length', contentLength.toString());
@ -17,7 +19,7 @@ function setContentRange(response, byteRange, objectSize) {
const [start, end] = byteRange;
assert(start !== undefined && end !== undefined);
response.setHeader('Content-Range',
`bytes ${start}-${end}/${objectSize}`);
`bytes ${start}-${end}/${objectSize}`);
}
function sendError(res, log, error, optMessage) {
@ -42,7 +44,8 @@ function sendError(res, log, error, optMessage) {
* You have to call setup() to initialize the storage backend, then
* start() to start listening to the configured port.
*/
export default class RESTServer extends httpServer {
class RESTServer extends httpServer {
/**
* @constructor
* @param {Object} params - constructor params
@ -224,7 +227,7 @@ export default class RESTServer extends httpServer {
return sendError(res, log, err);
}
log.debug('sending back 200/206 response with contents',
{ key: pathInfo.key });
{ key: pathInfo.key });
setContentLength(res, contentLength);
res.setHeader('Accept-Ranges', 'bytes');
if (byteRange) {
@ -262,7 +265,7 @@ export default class RESTServer extends httpServer {
return sendError(res, log, err);
}
log.debug('sending back 204 response to DELETE',
{ key: pathInfo.key });
{ key: pathInfo.key });
res.writeHead(204);
return res.end(() => {
log.debug('DELETE response sent', { key: pathInfo.key });
@ -271,3 +274,5 @@ export default class RESTServer extends httpServer {
return undefined;
}
}
module.exports = RESTServer;

View File

@ -1,10 +1,12 @@
import errors from '../../errors';
import * as constants from '../../constants';
import * as url from 'url';
'use strict'; // eslint-disable-line
const errors = require('../../errors');
const constants = require('../../constants');
const url = require('url');
const passthroughPrefixLength = constants.passthroughFileURL.length;
export function explodePath(path) {
function explodePath(path) {
if (path.startsWith(constants.passthroughFileURL)) {
const key = path.slice(passthroughPrefixLength + 1);
return {
@ -17,7 +19,7 @@ export function explodePath(path) {
return {
service: pathMatch[1],
key: (pathMatch[3] !== undefined && pathMatch[3].length > 0 ?
pathMatch[3] : undefined),
pathMatch[3] : undefined),
};
}
throw errors.InvalidURI.customizeDescription('malformed URI');
@ -35,7 +37,7 @@ export function explodePath(path) {
* - pathInfo.service {String} - The name of REST service ("DataFile")
* - pathInfo.key {String} - The requested key
*/
export function parseURL(urlStr, expectKey) {
function parseURL(urlStr, expectKey) {
const urlObj = url.parse(urlStr);
const pathInfo = explodePath(decodeURI(urlObj.path));
if ((pathInfo.service !== constants.dataFileURL)
@ -59,3 +61,8 @@ export function parseURL(urlStr, expectKey) {
}
return pathInfo;
}
module.exports = {
explodePath,
parseURL,
};

View File

@ -1,6 +1,8 @@
import assert from 'assert';
'use strict'; // eslint-disable-line
import * as rpc from './rpc';
const assert = require('assert');
const rpc = require('./rpc.js');
/**
* @class
@ -15,6 +17,7 @@ import * as rpc from './rpc';
* RPC client object accessing the sub-level transparently.
*/
class LevelDbClient extends rpc.BaseClient {
/**
* @constructor
*
@ -75,6 +78,7 @@ class LevelDbClient extends rpc.BaseClient {
* env.subDb (env is passed as first parameter of received RPC calls).
*/
class LevelDbService extends rpc.BaseService {
/**
* @constructor
*

View File

@ -1,14 +1,17 @@
import http from 'http';
import io from 'socket.io';
import ioClient from 'socket.io-client';
import * as sioStream from './sio-stream';
import async from 'async';
import assert from 'assert';
import { EventEmitter } from 'events';
'use strict'; // eslint-disable-line
import { flattenError, reconstructError } from './utils';
import errors from '../../errors';
import * as jsutil from '../../jsutil';
const http = require('http');
const io = require('socket.io');
const ioClient = require('socket.io-client');
const sioStream = require('./sio-stream');
const async = require('async');
const assert = require('assert');
const EventEmitter = require('events').EventEmitter;
const flattenError = require('./utils').flattenError;
const reconstructError = require('./utils').reconstructError;
const errors = require('../../errors');
const jsutil = require('../../jsutil');
const DEFAULT_CALL_TIMEOUT_MS = 30000;
@ -33,7 +36,8 @@ let streamRPCJSONObj;
* - the return value is passed as callback's second argument (unless
* an error occurred).
*/
export class BaseClient extends EventEmitter {
class BaseClient extends EventEmitter {
/**
* @constructor
*
@ -50,7 +54,7 @@ export class BaseClient extends EventEmitter {
*/
constructor(params) {
const { url, logger, callTimeoutMs,
streamMaxPendingAck, streamAckTimeoutMs } = params;
streamMaxPendingAck, streamAckTimeoutMs } = params;
assert(url);
assert(logger);
@ -78,11 +82,11 @@ export class BaseClient extends EventEmitter {
_call(remoteCall, args, cb) {
const wrapCb = (err, data) => {
cb(reconstructError(err),
this.socketStreams.decodeStreams(data));
this.socketStreams.decodeStreams(data));
};
this.logger.debug('remote call', { remoteCall, args });
this.socket.emit('call', remoteCall,
this.socketStreams.encodeStreams(args), wrapCb);
this.socketStreams.encodeStreams(args), wrapCb);
return undefined;
}
@ -109,8 +113,8 @@ export class BaseClient extends EventEmitter {
throw new Error(`argument cb=${cb} is not a callback`);
}
async.timeout(this._call.bind(this), timeoutMs,
`operation ${remoteCall} timed out`)(remoteCall,
args, cb);
`operation ${remoteCall} timed out`)(remoteCall,
args, cb);
return undefined;
}
@ -138,7 +142,7 @@ export class BaseClient extends EventEmitter {
const url = this.url;
this.socket.on('error', err => {
this.logger.warn('connectivity error to the RPC service',
{ url, error: err });
{ url, error: err });
});
this.socket.on('connect', () => {
this.emit('connect');
@ -152,7 +156,7 @@ export class BaseClient extends EventEmitter {
this.getManifest((err, manifest) => {
if (err) {
this.logger.error('Error fetching manifest from RPC server',
{ error: err });
{ error: err });
} else {
manifest.api.forEach(apiItem => {
this.createCall(apiItem.name);
@ -246,7 +250,8 @@ export class BaseClient extends EventEmitter {
* method.
*
*/
export class BaseService {
class BaseService {
/**
* @constructor
*
@ -464,8 +469,7 @@ export class BaseService {
* @return {Object} a server object, not yet listening on a TCP port
* (you must call listen(port) on the returned object)
*/
export function RPCServer(params) {
function RPCServer(params) {
assert(params.logger);
const httpServer = http.createServer();
@ -493,7 +497,7 @@ export function RPCServer(params) {
conn.on('error', err => {
log.error('error on socket.io connection',
{ namespace: service.namespace, error: err });
{ namespace: service.namespace, error: err });
});
conn.on('call', (remoteCall, args, cb) => {
const decodedArgs = streamsSocket.decodeStreams(args);
@ -643,8 +647,8 @@ streamRPCJSONObj = function _streamRPCJSONObj(obj, wstream, cb) {
// primitive types
if (obj === undefined) {
wstream.write('null'); // if undefined elements are present in
// arrays, convert them to JSON null
// objects
// arrays, convert them to JSON null
// objects
} else {
wstream.write(JSON.stringify(obj));
}
@ -664,7 +668,7 @@ streamRPCJSONObj = function _streamRPCJSONObj(obj, wstream, cb) {
* @return {Object} a HTTP server object, not yet listening on a TCP
* port (you must call listen(port) on the returned object)
*/
export function RESTServer(params) {
function RESTServer(params) {
assert(params);
assert(params.logger);
const httpServer = http.createServer((req, res) => {
@ -735,3 +739,11 @@ export function RESTServer(params) {
return httpServer;
}
module.exports = {
BaseClient,
BaseService,
RPCServer,
RESTServer,
};

View File

@ -1,11 +1,13 @@
import uuid from 'uuid';
import stream from 'stream';
import debug_ from 'debug';
import assert from 'assert';
import async from 'async';
import { flattenError, reconstructError } from './utils';
const debug = debug_('sio-stream');
'use strict'; // eslint-disable-line
const uuid = require('uuid');
const stream = require('stream');
const debug = require('debug')('sio-stream');
const assert = require('assert');
const async = require('async');
const flattenError = require('./utils').flattenError;
const reconstructError = require('./utils').reconstructError;
const DEFAULT_MAX_PENDING_ACK = 4;
const DEFAULT_ACK_TIMEOUT_MS = 5000;
@ -14,7 +16,7 @@ class SIOOutputStream extends stream.Writable {
constructor(socket, streamId, maxPendingAck, ackTimeoutMs) {
super({ objectMode: true });
this._initOutputStream(socket, streamId, maxPendingAck,
ackTimeoutMs);
ackTimeoutMs);
}
_initOutputStream(socket, streamId, maxPendingAck, ackTimeoutMs) {
@ -192,7 +194,7 @@ class SIOStreamSocket {
this.socket.on('stream-data', (payload, cb) => {
const { streamId, data } = payload;
log.debug('received \'stream-data\' event',
{ streamId, size: data.length });
{ streamId, size: data.length });
const stream = this.remoteStreams[streamId];
if (!stream) {
log.debug('no such remote stream registered', { streamId });
@ -278,15 +280,15 @@ class SIOStreamSocket {
let transportStream;
if (isReadStream) {
transportStream = new SIOOutputStream(this, streamId,
this.maxPendingAck,
this.ackTimeoutMs);
this.maxPendingAck,
this.ackTimeoutMs);
} else {
transportStream = new SIOInputStream(this, streamId);
}
this.localStreams[streamId] = arg;
arg.once('close', () => {
log.debug('stream closed, removing from local streams',
{ streamId });
{ streamId });
delete this.localStreams[streamId];
});
arg.on('error', error => {
@ -348,8 +350,8 @@ class SIOStreamSocket {
stream = new SIOInputStream(this, streamId);
} else if (arg.writable) {
stream = new SIOOutputStream(this, streamId,
this.maxPendingAck,
this.ackTimeoutMs);
this.maxPendingAck,
this.ackTimeoutMs);
} else {
throw new Error('can\'t decode stream neither readable ' +
'nor writable');
@ -358,14 +360,14 @@ class SIOStreamSocket {
if (arg.readable) {
stream.once('close', () => {
log.debug('stream closed, removing from remote streams',
{ streamId });
{ streamId });
delete this.remoteStreams[streamId];
});
}
if (arg.writable) {
stream.once('finish', () => {
log.debug('stream finished, removing from remote streams',
{ streamId });
{ streamId });
delete this.remoteStreams[streamId];
});
}
@ -397,7 +399,7 @@ class SIOStreamSocket {
_write(streamId, data, cb) {
this.logger.debug('emit \'stream-data\' event',
{ streamId, size: data.length });
{ streamId, size: data.length });
this.socket.emit('stream-data', { streamId, data }, cb);
}
@ -431,7 +433,7 @@ class SIOStreamSocket {
}
}
export function createSocket(
module.exports.createSocket = function createSocket(
socket,
logger,
maxPendingAck = DEFAULT_MAX_PENDING_ACK,

View File

@ -1,3 +1,5 @@
'use strict'; // eslint-disable-line
/**
* @brief turn all <tt>err</tt> own and prototype attributes into own attributes
*
@ -7,7 +9,7 @@
* @param {Error} err error object
* @return {Object} flattened object containing <tt>err</tt> attributes
*/
export function flattenError(err) {
module.exports.flattenError = function flattenError(err) {
if (!err) {
return err;
}
@ -33,7 +35,7 @@ export function flattenError(err) {
* @return {Error} a reconstructed Error object inheriting <tt>err</tt>
* attributes
*/
export function reconstructError(err) {
module.exports.reconstructError = function reconstructError(err) {
if (!err) {
return err;
}

View File

@ -1,7 +1,9 @@
import { URL } from 'url';
import { decryptSecret } from '../executables/pensieveCreds/utils';
'use strict'; // eslint-disable-line strict
export function patchLocations(overlayLocations, creds, log) {
const { URL } = require('url');
const { decryptSecret } = require('../executables/pensieveCreds/utils');
function patchLocations(overlayLocations, creds, log) {
if (!overlayLocations) {
return {};
}
@ -151,3 +153,7 @@ export function patchLocations(overlayLocations, creds, log) {
});
return locations;
}
module.exports = {
patchLocations,
};

View File

@ -1,7 +1,9 @@
import Ajv from 'ajv';
import userPolicySchema from './userPolicySchema.json';
import resourcePolicySchema from './resourcePolicySchema.json';
import errors from '../errors';
'use strict'; // eslint-disable-line strict
const Ajv = require('ajv');
const userPolicySchema = require('./userPolicySchema');
const resourcePolicySchema = require('./resourcePolicySchema');
const errors = require('../errors');
const ajValidate = new Ajv({ allErrors: true });
ajValidate.addMetaSchema(require('ajv/lib/refs/json-schema-draft-06.json'));
@ -115,7 +117,7 @@ function _validatePolicy(type, policy) {
* @returns {Object} - returns object with properties error and value
* @returns {ValidationResult} - result of the validation
*/
export function validateUserPolicy(policy) {
function validateUserPolicy(policy) {
return _validatePolicy('user', policy);
}
@ -125,6 +127,11 @@ export function validateUserPolicy(policy) {
* @returns {Object} - returns object with properties error and value
* @returns {ValidationResult} - result of the validation
*/
export function validateResourcePolicy(policy) {
function validateResourcePolicy(policy) {
return _validatePolicy('resource', policy);
}
module.exports = {
validateUserPolicy,
validateResourcePolicy,
};

View File

@ -1,17 +1,19 @@
import { parseIp } from '../ipCheck';
'use strict'; // eslint-disable-line strict
const parseIp = require('../ipCheck').parseIp;
// http://docs.aws.amazon.com/IAM/latest/UserGuide/list_s3.html
// For MPU actions:
// http://docs.aws.amazon.com/AmazonS3/latest/dev/mpuAndPermissions.html
// For bucket head and object head:
// http://docs.aws.amazon.com/AmazonS3/latest/dev/
// using-with-s3-actions.html
import {
const {
actionMapRQ,
actionMapIAM,
actionMapSSO,
actionMapSTS,
actionMapMetadata,
} from './utils/actionMaps';
} = require('./utils/actionMaps');
const _actionNeedQuotaCheck = {
objectPut: true,
@ -127,7 +129,7 @@ function _buildArn(service, generalResource, specificResource, requesterInfo) {
* @return {RequestContext} a RequestContext instance
*/
export default class RequestContext {
class RequestContext {
constructor(headers, query, generalResource, specificResource,
requesterIp, sslEnabled, apiMethod,
awsService, locationConstraint, requesterInfo,
@ -646,3 +648,5 @@ export default class RequestContext {
return this._needTagEval;
}
}
module.exports = RequestContext;

View File

@ -1,8 +1,14 @@
import { substituteVariables } from './utils/variables';
import { handleWildcards } from './utils/wildcards';
import { findConditionKey, convertConditionOperator } from './utils/conditions.js';
import checkArnMatch from './utils/checkArnMatch.js';
import { transformTagKeyValue } from './utils/objectTags';
'use strict'; // eslint-disable-line strict
const substituteVariables = require('./utils/variables.js');
const handleWildcards = require('./utils/wildcards.js').handleWildcards;
const conditions = require('./utils/conditions.js');
const findConditionKey = conditions.findConditionKey;
const convertConditionOperator = conditions.convertConditionOperator;
const checkArnMatch = require('./utils/checkArnMatch.js');
const { transformTagKeyValue } = require('./utils/objectTags');
const evaluators = {};
const operatorsWithVariables = ['StringEquals', 'StringNotEquals',
'StringEqualsIgnoreCase', 'StringNotEqualsIgnoreCase',
@ -22,7 +28,7 @@ const tagConditions = new Set(['s3:ExistingObjectTag', 's3:RequestObjectTagKey',
* @param {object} log - logger
* @return {boolean} true if applicable, false if not
*/
export function isResourceApplicable(requestContext, statementResource, log) {
evaluators.isResourceApplicable = (requestContext, statementResource, log) => {
const resource = requestContext.getResource();
if (!Array.isArray(statementResource)) {
// eslint-disable-next-line no-param-reassign
@ -44,7 +50,7 @@ export function isResourceApplicable(requestContext, statementResource, log) {
requestResourceArr, true);
if (arnSegmentsMatch) {
log.trace('policy resource is applicable to request',
{ requestResource: resource, policyResource });
{ requestResource: resource, policyResource });
return true;
}
continue;
@ -63,7 +69,7 @@ export function isResourceApplicable(requestContext, statementResource, log) {
* @param {Object} log - logger
* @return {boolean} true if applicable, false if not
*/
export function isActionApplicable(requestAction, statementAction, log) {
evaluators.isActionApplicable = (requestAction, statementAction, log) => {
if (!Array.isArray(statementAction)) {
// eslint-disable-next-line no-param-reassign
statementAction = [statementAction];
@ -95,7 +101,7 @@ export function isActionApplicable(requestAction, statementAction, log) {
* @return {Object} contains whether conditions are allowed and whether they
* contain any tag condition keys
*/
export function meetConditions(requestContext, statementCondition, log) {
evaluators.meetConditions = (requestContext, statementCondition, log) => {
// The Condition portion of a policy is an object with different
// operators as keys
const conditionEval = {};
@ -198,7 +204,7 @@ export function meetConditions(requestContext, statementCondition, log) {
* @return {string} Allow if permitted, Deny if not permitted or Neutral
* if not applicable
*/
export function evaluatePolicy(requestContext, policy, log) {
evaluators.evaluatePolicy = (requestContext, policy, log) => {
// TODO: For bucket policies need to add Principal evaluation
let verdict = 'Neutral';
@ -210,33 +216,33 @@ export function evaluatePolicy(requestContext, policy, log) {
const currentStatement = policy.Statement[i];
// If affirmative resource is in policy and request resource is
// not applicable, move on to next statement
if (currentStatement.Resource && !isResourceApplicable(requestContext,
if (currentStatement.Resource && !evaluators.isResourceApplicable(requestContext,
currentStatement.Resource, log)) {
continue;
}
// If NotResource is in policy and resource matches NotResource
// in policy, move on to next statement
if (currentStatement.NotResource &&
isResourceApplicable(requestContext,
currentStatement.NotResource, log)) {
evaluators.isResourceApplicable(requestContext,
currentStatement.NotResource, log)) {
continue;
}
// If affirmative action is in policy and request action is not
// applicable, move on to next statement
if (currentStatement.Action &&
!isActionApplicable(requestContext.getAction(),
currentStatement.Action, log)) {
!evaluators.isActionApplicable(requestContext.getAction(),
currentStatement.Action, log)) {
continue;
}
// If NotAction is in policy and action matches NotAction in policy,
// move on to next statement
if (currentStatement.NotAction &&
isActionApplicable(requestContext.getAction(),
currentStatement.NotAction, log)) {
evaluators.isActionApplicable(requestContext.getAction(),
currentStatement.NotAction, log)) {
continue;
}
const conditionEval = currentStatement.Condition ?
meetConditions(requestContext, currentStatement.Condition, log) :
evaluators.meetConditions(requestContext, currentStatement.Condition, log) :
null;
// If do not meet conditions move on to next statement
if (conditionEval && !conditionEval.allow) {
@ -270,12 +276,12 @@ export function evaluatePolicy(requestContext, policy, log) {
* @return {string} Allow if permitted, Deny if not permitted.
* Default is to Deny. Deny overrides an Allow
*/
export function evaluateAllPolicies(requestContext, allPolicies, log) {
evaluators.evaluateAllPolicies = (requestContext, allPolicies, log) => {
log.trace('evaluating all policies');
let verdict = 'Deny';
for (let i = 0; i < allPolicies.length; i++) {
const singlePolicyVerdict =
evaluatePolicy(requestContext, allPolicies[i], log);
evaluators.evaluatePolicy(requestContext, allPolicies[i], log);
// If there is any Deny, just return Deny
if (singlePolicyVerdict === 'Deny') {
return 'Deny';
@ -287,3 +293,5 @@ export function evaluateAllPolicies(requestContext, allPolicies, log) {
log.trace('result of evaluating all pollicies', { verdict });
return verdict;
};
module.exports = evaluators;

View File

@ -1,9 +1,9 @@
import { meetConditions } from './evaluator';
const { meetConditions } = require('./evaluator');
/**
* Class with methods to manage the policy 'principal' validation
*/
export default class Principal {
class Principal {
/**
* Function to evaluate conditions if needed
*
@ -176,3 +176,5 @@ export default class Principal {
};
}
}
module.exports = Principal;

View File

@ -1,4 +1,4 @@
import * as ipCheck from '../ipCheck';
const ipCheck = require('../ipCheck');
/**
* getClientIp - Gets the client IP from the request
@ -6,7 +6,7 @@ import * as ipCheck from '../ipCheck';
* @param {object} s3config - s3 config
* @return {string} - returns client IP from the request
*/
export function getClientIp(request, s3config) {
function getClientIp(request, s3config) {
const requestConfig = s3config ? s3config.requests : {};
const remoteAddress = request.socket.remoteAddress;
const clientIp = requestConfig ? remoteAddress : request.headers['x-forwarded-for'] || remoteAddress;
@ -26,3 +26,7 @@ export function getClientIp(request, s3config) {
}
return clientIp;
}
module.exports = {
getClientIp,
};

View File

@ -1,4 +1,4 @@
export const sharedActionMap = {
const sharedActionMap = {
bucketDelete: 's3:DeleteBucket',
// the "s3:PutEncryptionConfiguration" action also governs DELETE
bucketDeleteEncryption: 's3:PutEncryptionConfiguration',
@ -47,7 +47,7 @@ export const sharedActionMap = {
};
// action map used for request context
export const actionMapRQ = Object.assign({
const actionMapRQ = Object.assign({
bucketPut: 's3:CreateBucket',
// for bucketDeleteCors need s3:PutBucketCORS permission
// see http://docs.aws.amazon.com/AmazonS3/latest/API/
@ -73,15 +73,15 @@ export const actionMapRQ = Object.assign({
}, sharedActionMap);
// action map used for bucket policies
export const actionMapBP = Object.assign({}, sharedActionMap);
const actionMapBP = Object.assign({}, sharedActionMap);
// action map for all relevant s3 actions
export const actionMapS3 = Object.assign({
const actionMapS3 = Object.assign({
bucketGetNotification: 's3:GetBucketNotification',
bucketPutNotification: 's3:PutBucketNotification',
}, sharedActionMap, actionMapRQ, actionMapBP);
export const actionMonitoringMapS3 = {
const actionMonitoringMapS3 = {
bucketDelete: 'DeleteBucket',
bucketDeleteCors: 'DeleteBucketCors',
bucketDeleteEncryption: 'DeleteBucketEncryption',
@ -139,7 +139,7 @@ export const actionMonitoringMapS3 = {
serviceGet: 'ListBuckets',
};
export const actionMapIAM = {
const actionMapIAM = {
attachGroupPolicy: 'iam:AttachGroupPolicy',
attachUserPolicy: 'iam:AttachUserPolicy',
createAccessKey: 'iam:CreateAccessKey',
@ -178,15 +178,26 @@ export const actionMapIAM = {
getCredentialReport: 'iam:GetCredentialReport',
};
export const actionMapSSO = {
const actionMapSSO = {
SsoAuthorize: 'sso:Authorize',
};
export const actionMapSTS = {
const actionMapSTS = {
assumeRole: 'sts:AssumeRole',
};
export const actionMapMetadata = {
const actionMapMetadata = {
admin: 'metadata:admin',
default: 'metadata:bucketd',
};
module.exports = {
actionMapRQ,
actionMapBP,
actionMapS3,
actionMonitoringMapS3,
actionMapIAM,
actionMapSSO,
actionMapSTS,
actionMapMetadata,
};

Some files were not shown because too many files have changed in this diff Show More