Compare commits

..

9 Commits

Author SHA1 Message Date
Guillaume Hivert fd8a49e71c ARSN-69 Switch all JS files to TS files 2022-02-23 16:43:02 +01:00
Guillaume Hivert c4b9071048 ARSN-84 Fix coverage by using Istanbul and Jest
Jest coverage is broken because Jest cannot cover code running in
spawned subprocesses. Istanbul can, so the final setup is to launch
nyc and Jest together: Jest emits its coverage, and nyc merges that
coverage with its own to emit the proper final coverage files.
2022-02-18 18:04:23 +01:00
Guillaume Hivert 3bdae8814a ARSN-84 Correct Jest configuration for test suites and coverage
Thanks to the file renaming, we can follow the Jest default
configuration as closely as possible. Most options are gone; we only
specify maxWorkers (because the test suite is linear and breaks when
run in parallel) and the files to collect coverage from.
The coverage script itself is merged into one command instead of three
to leverage Jest's built-in coverage.
2022-02-18 18:04:18 +01:00
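As an illustrative aside (not the exact file from this changeset; the maxWorkers value and coverage globs are assumptions), a Jest configuration along the lines described above could look like:
```
// jest.config.ts -- hypothetical sketch of the options mentioned in the commit
export default {
    // the test suite is linear and breaks when run in parallel
    maxWorkers: 1,
    // files to collect coverage from (globs are assumptions)
    collectCoverageFrom: ['lib/**/*.{js,ts}', 'index.ts'],
};
```
With such a configuration, the coverage command from the previous commit would boil down to wrapping the Jest run with nyc (something like `nyc jest --coverage`), letting nyc merge subprocess coverage with Jest's own output; the exact script is not shown in this compare view.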
Guillaume Hivert 3a7d9b7525 ARSN-84 Rename all test files from [name].js to [name].spec.js
In order to simplify the Jest configuration, we have to rename the files to
follow the Jest convention (a .spec.js extension for test
files).
2022-02-18 18:04:14 +01:00
Guillaume Hivert 97cca52179 ARSN-84 Fix Jest timeout for long HealthProbeServer 2022-02-18 18:04:08 +01:00
Guillaume Hivert 93f4ae2c7e ARSN-84 Fix Jest bug in _arsenalError
You can check out the bug at
https://github.com/facebook/jest/issues/2549.
The bug is inherent to Jest and has been known for years: Jest
switches the VM from Node's to its own custom VM and injects
its own set of globals. The Error provided by Jest is different from
the Error provided by Node, so the test `err instanceof Error` is false.
Error:
```
 Expected value to be equal to:
      true
 Received:
      false
```
2022-02-18 18:04:03 +01:00
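For illustration only (not part of this changeset; the helper name and exact checks are assumptions), a duck-typing check such as the following sketch can sidestep the cross-VM `instanceof` problem:
```
// Hypothetical workaround sketch for the cross-VM instanceof issue
// (jest issue #2549): an Error created under Jest's injected globals can
// fail `err instanceof Error` even though it is a genuine error object.
function looksLikeError(err: unknown): boolean {
    return err instanceof Error ||
        (typeof err === 'object' &&
            err !== null &&
            typeof (err as any).message === 'string' &&
            typeof (err as any).stack === 'string');
}
```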
Guillaume Hivert 2f326fdee4 ARSN-84 Fix redis commands in functional tests
The switch from mocha to Jest introduces some test bugs.
As far as we can tell, Jest is quicker than mocha, creating some
weird behaviour: some commands sent to Redis (with ioredis)
work, and some don't. Our conclusion is that Redis needs
to queue requests offline to avoid micro-disconnections from
Redis in development. Otherwise, we get the following error:
```
  - StatsModel class › should correctly record a new request by default one increment

    assert.ifError(received, expected)

    Expected value ifError to:
      null
    Received:
      [Error: Stream isn't writeable and enableOfflineQueue options is false]

    Message:
      ifError got unwanted exception: Stream isn't writeable and enableOfflineQueue options is false
```
Switching enableOfflineQueue to true makes the test suite
pass.
2022-02-18 18:03:58 +01:00
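As a hedged illustration, enabling the offline queue on an ioredis client looks roughly like this (host and port are assumed test defaults, not values taken from this changeset):
```
import Redis from 'ioredis';

// Buffer commands while the connection is (re)establishing instead of
// failing with "Stream isn't writeable and enableOfflineQueue options is false".
const redis = new Redis({
    host: 'localhost',        // assumed test default
    port: 6379,               // assumed test default
    enableOfflineQueue: true, // queue requests while offline
});
```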
Guillaume Hivert 824dc63d54 ARSN-84 Fix linting with correct indentation and trailing commas 2022-02-18 18:03:52 +01:00
Guillaume Hivert 1135a2e42c ARSN-84 Introduce TypeScript, Jest and reconfigure ESLint
Introduces TypeScript with a proper tsconfig.json with sane defaults,
adds Jest as a test runner replacing mocha in order to compile
TS on the fly and allow mixed TS/JS sources
(and replaces mocha's before and after with Jest's beforeAll
and afterAll), and adds some ESLint configuration to make
ESLint happy.
2022-02-18 18:03:37 +01:00
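As an illustrative sketch (suite and test names are hypothetical), the mocha-to-Jest hook migration mentioned above amounts to:
```
// mocha's before()/after() become Jest's beforeAll()/afterAll();
// the assertions themselves stay unchanged.
describe('some suite', () => {
    beforeAll(() => {
        // setup that previously lived in mocha's before()
    });

    afterAll(() => {
        // teardown that previously lived in mocha's after()
    });

    test('keeps passing after the runner switch', () => {
        expect(1 + 1).toBe(2);
    });
});
```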
297 changed files with 8483 additions and 5943 deletions

3
.gitignore vendored
View File

@ -13,3 +13,6 @@ node_modules/
# Coverage # Coverage
coverage/ coverage/
.nyc_output/ .nyc_output/
# TypeScript
build/

4
.prettierrc Normal file
View File

@ -0,0 +1,4 @@
{
"tabWidth": 4,
"singleQuote": true
}

197
index.js
View File

@ -1,197 +0,0 @@
module.exports = {
auth: require('./lib/auth/auth'),
constants: require('./lib/constants'),
db: require('./lib/db'),
errors: require('./lib/errors.js'),
errorUtils: require('./lib/errorUtils'),
shuffle: require('./lib/shuffle'),
stringHash: require('./lib/stringHash'),
ipCheck: require('./lib/ipCheck'),
jsutil: require('./lib/jsutil'),
https: {
ciphers: require('./lib/https/ciphers.js'),
dhparam: require('./lib/https/dh2048.js'),
},
algorithms: {
list: require('./lib/algos/list/exportAlgos'),
listTools: {
DelimiterTools: require('./lib/algos/list/tools'),
},
cache: {
LRUCache: require('./lib/algos/cache/LRUCache'),
},
stream: {
MergeStream: require('./lib/algos/stream/MergeStream'),
},
SortedSet: require('./lib/algos/set/SortedSet'),
},
policies: {
evaluators: require('./lib/policyEvaluator/evaluator.js'),
validateUserPolicy: require('./lib/policy/policyValidator')
.validateUserPolicy,
evaluatePrincipal: require('./lib/policyEvaluator/principal'),
RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
requestUtils: require('./lib/policyEvaluator/requestUtils'),
actionMaps: require('./lib/policyEvaluator/utils/actionMaps'),
},
Clustering: require('./lib/Clustering'),
testing: {
matrix: require('./lib/testing/matrix.js'),
},
versioning: {
VersioningConstants: require('./lib/versioning/constants.js')
.VersioningConstants,
Version: require('./lib/versioning/Version.js').Version,
VersionID: require('./lib/versioning/VersionID.js'),
},
network: {
http: {
server: require('./lib/network/http/server'),
},
rpc: require('./lib/network/rpc/rpc'),
level: require('./lib/network/rpc/level-net'),
rest: {
RESTServer: require('./lib/network/rest/RESTServer'),
RESTClient: require('./lib/network/rest/RESTClient'),
},
RoundRobin: require('./lib/network/RoundRobin'),
probe: {
ProbeServer: require('./lib/network/probe/ProbeServer'),
HealthProbeServer:
require('./lib/network/probe/HealthProbeServer.js'),
Utils: require('./lib/network/probe/Utils.js'),
},
kmip: require('./lib/network/kmip'),
kmipClient: require('./lib/network/kmip/Client'),
},
s3routes: {
routes: require('./lib/s3routes/routes'),
routesUtils: require('./lib/s3routes/routesUtils'),
},
s3middleware: {
userMetadata: require('./lib/s3middleware/userMetadata'),
convertToXml: require('./lib/s3middleware/convertToXml'),
escapeForXml: require('./lib/s3middleware/escapeForXml'),
objectLegalHold: require('./lib/s3middleware/objectLegalHold'),
tagging: require('./lib/s3middleware/tagging'),
checkDateModifiedHeaders:
require('./lib/s3middleware/validateConditionalHeaders')
.checkDateModifiedHeaders,
validateConditionalHeaders:
require('./lib/s3middleware/validateConditionalHeaders')
.validateConditionalHeaders,
MD5Sum: require('./lib/s3middleware/MD5Sum'),
NullStream: require('./lib/s3middleware/nullStream'),
objectUtils: require('./lib/s3middleware/objectUtils'),
azureHelper: {
mpuUtils:
require('./lib/s3middleware/azureHelpers/mpuUtils'),
ResultsCollector:
require('./lib/s3middleware/azureHelpers/ResultsCollector'),
SubStreamInterface:
require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
},
prepareStream: require('./lib/s3middleware/prepareStream'),
processMpuParts: require('./lib/s3middleware/processMpuParts'),
retention: require('./lib/s3middleware/objectRetention'),
lifecycleHelpers: require('./lib/s3middleware/lifecycleHelpers'),
},
storage: {
metadata: {
MetadataWrapper: require('./lib/storage/metadata/MetadataWrapper'),
bucketclient: {
BucketClientInterface:
require('./lib/storage/metadata/bucketclient/' +
'BucketClientInterface'),
LogConsumer:
require('./lib/storage/metadata/bucketclient/LogConsumer'),
},
file: {
BucketFileInterface:
require('./lib/storage/metadata/file/BucketFileInterface'),
MetadataFileServer:
require('./lib/storage/metadata/file/MetadataFileServer'),
MetadataFileClient:
require('./lib/storage/metadata/file/MetadataFileClient'),
},
inMemory: {
metastore:
require('./lib/storage/metadata/in_memory/metastore'),
metadata: require('./lib/storage/metadata/in_memory/metadata'),
bucketUtilities:
require('./lib/storage/metadata/in_memory/bucket_utilities'),
},
mongoclient: {
MongoClientInterface:
require('./lib/storage/metadata/mongoclient/' +
'MongoClientInterface'),
LogConsumer:
require('./lib/storage/metadata/mongoclient/LogConsumer'),
},
proxy: {
Server: require('./lib/storage/metadata/proxy/Server'),
},
},
data: {
DataWrapper: require('./lib/storage/data/DataWrapper'),
MultipleBackendGateway:
require('./lib/storage/data/MultipleBackendGateway'),
parseLC: require('./lib/storage/data/LocationConstraintParser'),
file: {
DataFileStore:
require('./lib/storage/data/file/DataFileStore'),
DataFileInterface:
require('./lib/storage/data/file/DataFileInterface'),
},
external: {
AwsClient: require('./lib/storage/data/external/AwsClient'),
AzureClient: require('./lib/storage/data/external/AzureClient'),
GcpClient: require('./lib/storage/data/external/GcpClient'),
GCP: require('./lib/storage/data/external/GCP/GcpService'),
GcpUtils: require('./lib/storage/data/external/GCP/GcpUtils'),
GcpSigner: require('./lib/storage/data/external/GCP/GcpSigner'),
PfsClient: require('./lib/storage/data/external/PfsClient'),
backendUtils: require('./lib/storage/data/external/utils'),
},
inMemory: {
datastore: require('./lib/storage/data/in_memory/datastore'),
},
},
utils: require('./lib/storage/utils'),
},
models: {
BackendInfo: require('./lib/models/BackendInfo'),
BucketInfo: require('./lib/models/BucketInfo'),
BucketAzureInfo: require('./lib/models/BucketAzureInfo'),
ObjectMD: require('./lib/models/ObjectMD'),
ObjectMDLocation: require('./lib/models/ObjectMDLocation'),
ObjectMDAzureInfo: require('./lib/models/ObjectMDAzureInfo'),
ARN: require('./lib/models/ARN'),
WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
ReplicationConfiguration:
require('./lib/models/ReplicationConfiguration'),
LifecycleConfiguration:
require('./lib/models/LifecycleConfiguration'),
LifecycleRule: require('./lib/models/LifecycleRule'),
BucketPolicy: require('./lib/models/BucketPolicy'),
ObjectLockConfiguration:
require('./lib/models/ObjectLockConfiguration'),
NotificationConfiguration:
require('./lib/models/NotificationConfiguration'),
},
metrics: {
StatsClient: require('./lib/metrics/StatsClient'),
StatsModel: require('./lib/metrics/StatsModel'),
RedisClient: require('./lib/metrics/RedisClient'),
ZenkoMetrics: require('./lib/metrics/ZenkoMetrics'),
},
pensieve: {
credentialUtils: require('./lib/executables/pensieveCreds/utils'),
},
stream: {
readJSONStreamObject: require('./lib/stream/readJSONStreamObject'),
},
patches: {
locationConstraints: require('./lib/patches/locationConstraints'),
},
};

192
index.ts Normal file
View File

@ -0,0 +1,192 @@
export * as auth from './lib/auth/auth'
export { default as constants } from './lib/constants'
export { default as errors } from './lib/errors'
export { default as shuffle } from './lib/shuffle'
export { default as stringHash } from './lib/stringHash'
export * as db from './lib/db'
export * as errorUtils from './lib/errorUtils'
export * as ipCheck from './lib/ipCheck'
export * as jsutil from './lib/jsutil'
export * as https from './lib/https'
// algorithms: {
// list: require('./lib/algos/list/exportAlgos'),
// listTools: {
// DelimiterTools: require('./lib/algos/list/tools'),
// },
// cache: {
// LRUCache: require('./lib/algos/cache/LRUCache'),
// },
// stream: {
// MergeStream: require('./lib/algos/stream/MergeStream'),
// },
// SortedSet: require('./lib/algos/set/SortedSet'),
// },
// policies: {
// evaluators: require('./lib/policyEvaluator/evaluator.js'),
// validateUserPolicy: require('./lib/policy/policyValidator')
// .validateUserPolicy,
// evaluatePrincipal: require('./lib/policyEvaluator/principal'),
// RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
// requestUtils: require('./lib/policyEvaluator/requestUtils'),
// actionMaps: require('./lib/policyEvaluator/utils/actionMaps'),
// },
// Clustering: require('./lib/Clustering'),
// testing: {
// matrix: require('./lib/testing/matrix.js'),
// },
// versioning: {
// VersioningConstants: require('./lib/versioning/constants.js')
// .VersioningConstants,
// Version: require('./lib/versioning/Version.js').Version,
// VersionID: require('./lib/versioning/VersionID.js'),
// },
// network: {
// http: {
// server: require('./lib/network/http/server'),
// },
// rpc: require('./lib/network/rpc/rpc'),
// level: require('./lib/network/rpc/level-net'),
// rest: {
// RESTServer: require('./lib/network/rest/RESTServer'),
// RESTClient: require('./lib/network/rest/RESTClient'),
// },
// RoundRobin: require('./lib/network/RoundRobin'),
// probe: {
// ProbeServer: require('./lib/network/probe/ProbeServer'),
// HealthProbeServer:
// require('./lib/network/probe/HealthProbeServer.js'),
// Utils: require('./lib/network/probe/Utils.js'),
// },
// kmip: require('./lib/network/kmip'),
// kmipClient: require('./lib/network/kmip/Client'),
// },
// s3routes: {
// routes: require('./lib/s3routes/routes'),
// routesUtils: require('./lib/s3routes/routesUtils'),
// },
// s3middleware: {
// userMetadata: require('./lib/s3middleware/userMetadata'),
// convertToXml: require('./lib/s3middleware/convertToXml'),
// escapeForXml: require('./lib/s3middleware/escapeForXml'),
// objectLegalHold: require('./lib/s3middleware/objectLegalHold'),
// tagging: require('./lib/s3middleware/tagging'),
// checkDateModifiedHeaders:
// require('./lib/s3middleware/validateConditionalHeaders')
// .checkDateModifiedHeaders,
// validateConditionalHeaders:
// require('./lib/s3middleware/validateConditionalHeaders')
// .validateConditionalHeaders,
// MD5Sum: require('./lib/s3middleware/MD5Sum'),
// NullStream: require('./lib/s3middleware/nullStream'),
// objectUtils: require('./lib/s3middleware/objectUtils'),
// azureHelper: {
// mpuUtils:
// require('./lib/s3middleware/azureHelpers/mpuUtils'),
// ResultsCollector:
// require('./lib/s3middleware/azureHelpers/ResultsCollector'),
// SubStreamInterface:
// require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
// },
// prepareStream: require('./lib/s3middleware/prepareStream'),
// processMpuParts: require('./lib/s3middleware/processMpuParts'),
// retention: require('./lib/s3middleware/objectRetention'),
// lifecycleHelpers: require('./lib/s3middleware/lifecycleHelpers'),
// },
// storage: {
// metadata: {
// MetadataWrapper: require('./lib/storage/metadata/MetadataWrapper'),
// bucketclient: {
// BucketClientInterface:
// require('./lib/storage/metadata/bucketclient/' +
// 'BucketClientInterface'),
// LogConsumer:
// require('./lib/storage/metadata/bucketclient/LogConsumer'),
// },
// file: {
// BucketFileInterface:
// require('./lib/storage/metadata/file/BucketFileInterface'),
// MetadataFileServer:
// require('./lib/storage/metadata/file/MetadataFileServer'),
// MetadataFileClient:
// require('./lib/storage/metadata/file/MetadataFileClient'),
// },
// inMemory: {
// metastore:
// require('./lib/storage/metadata/in_memory/metastore'),
// metadata: require('./lib/storage/metadata/in_memory/metadata'),
// bucketUtilities:
// require('./lib/storage/metadata/in_memory/bucket_utilities'),
// },
// mongoclient: {
// MongoClientInterface:
// require('./lib/storage/metadata/mongoclient/' +
// 'MongoClientInterface'),
// LogConsumer:
// require('./lib/storage/metadata/mongoclient/LogConsumer'),
// },
// proxy: {
// Server: require('./lib/storage/metadata/proxy/Server'),
// },
// },
// data: {
// DataWrapper: require('./lib/storage/data/DataWrapper'),
// MultipleBackendGateway:
// require('./lib/storage/data/MultipleBackendGateway'),
// parseLC: require('./lib/storage/data/LocationConstraintParser'),
// file: {
// DataFileStore:
// require('./lib/storage/data/file/DataFileStore'),
// DataFileInterface:
// require('./lib/storage/data/file/DataFileInterface'),
// },
// external: {
// AwsClient: require('./lib/storage/data/external/AwsClient'),
// AzureClient: require('./lib/storage/data/external/AzureClient'),
// GcpClient: require('./lib/storage/data/external/GcpClient'),
// GCP: require('./lib/storage/data/external/GCP/GcpService'),
// GcpUtils: require('./lib/storage/data/external/GCP/GcpUtils'),
// GcpSigner: require('./lib/storage/data/external/GCP/GcpSigner'),
// PfsClient: require('./lib/storage/data/external/PfsClient'),
// backendUtils: require('./lib/storage/data/external/utils'),
// },
// inMemory: {
// datastore: require('./lib/storage/data/in_memory/datastore'),
// },
// },
// utils: require('./lib/storage/utils'),
// },
// models: {
// BackendInfo: require('./lib/models/BackendInfo'),
// BucketInfo: require('./lib/models/BucketInfo'),
// BucketAzureInfo: require('./lib/models/BucketAzureInfo'),
// ObjectMD: require('./lib/models/ObjectMD'),
// ObjectMDLocation: require('./lib/models/ObjectMDLocation'),
// ObjectMDAzureInfo: require('./lib/models/ObjectMDAzureInfo'),
// ARN: require('./lib/models/ARN'),
// WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
// ReplicationConfiguration:
// require('./lib/models/ReplicationConfiguration'),
// LifecycleConfiguration:
// require('./lib/models/LifecycleConfiguration'),
// LifecycleRule: require('./lib/models/LifecycleRule'),
// BucketPolicy: require('./lib/models/BucketPolicy'),
// ObjectLockConfiguration:
// require('./lib/models/ObjectLockConfiguration'),
// NotificationConfiguration:
// require('./lib/models/NotificationConfiguration'),
// },
// metrics: {
// StatsClient: require('./lib/metrics/StatsClient'),
// StatsModel: require('./lib/metrics/StatsModel'),
// RedisClient: require('./lib/metrics/RedisClient'),
// ZenkoMetrics: require('./lib/metrics/ZenkoMetrics'),
// },
// pensieve: {
// credentialUtils: require('./lib/executables/pensieveCreds/utils'),
// },
// stream: {
// readJSONStreamObject: require('./lib/stream/readJSONStreamObject'),
// },
// patches: {
// locationConstraints: require('./lib/patches/locationConstraints'),
// },

View File

@ -1,8 +1,6 @@
'use strict'; // eslint-disable-line import * as cluster from 'cluster';
const cluster = require('cluster'); export default class Clustering {
class Clustering {
/** /**
* Constructor * Constructor
* *
@ -259,5 +257,3 @@ class Clustering {
}); });
} }
} }
module.exports = Clustering;

View File

@ -1,4 +1,4 @@
const assert = require('assert'); import assert from 'assert';
/** /**
* @class * @class
@ -6,13 +6,19 @@ const assert = require('assert');
* number of items and a Least Recently Used (LRU) strategy for * number of items and a Least Recently Used (LRU) strategy for
* eviction. * eviction.
*/ */
class LRUCache { export default class LRUCache {
_maxEntries;
_entryMap;
_entryCount;
_lruTail;
_lruHead;
/** /**
* @constructor * @constructor
* @param {number} maxEntries - maximum number of entries kept in * @param maxEntries - maximum number of entries kept in
* the cache * the cache
*/ */
constructor(maxEntries) { constructor(maxEntries: number) {
assert(maxEntries >= 1); assert(maxEntries >= 1);
this._maxEntries = maxEntries; this._maxEntries = maxEntries;
this.clear(); this.clear();
@ -22,12 +28,12 @@ class LRUCache {
* Add or update the value associated to a key in the cache, * Add or update the value associated to a key in the cache,
* making it the most recently accessed for eviction purpose. * making it the most recently accessed for eviction purpose.
* *
* @param {string} key - key to add * @param key - key to add
* @param {object} value - associated value (can be of any type) * @param value - associated value (can be of any type)
* @return {boolean} true if the cache contained an entry with * @return true if the cache contained an entry with
* this key, false if it did not * this key, false if it did not
*/ */
add(key, value) { add(key: string, value): boolean {
let entry = this._entryMap[key]; let entry = this._entryMap[key];
if (entry) { if (entry) {
entry.value = value; entry.value = value;
@ -54,12 +60,12 @@ class LRUCache {
* Get the value associated to a key in the cache, making it the * Get the value associated to a key in the cache, making it the
* most recently accessed for eviction purpose. * most recently accessed for eviction purpose.
* *
* @param {string} key - key of which to fetch the associated value * @param key - key of which to fetch the associated value
* @return {object|undefined} - returns the associated value if * @return returns the associated value if
* exists in the cache, or undefined if not found - either if the * exists in the cache, or undefined if not found - either if the
* key was never added or if it has been evicted from the cache. * key was never added or if it has been evicted from the cache.
*/ */
get(key) { get(key: string) {
const entry = this._entryMap[key]; const entry = this._entryMap[key];
if (entry) { if (entry) {
// make the entry the most recently used by re-pushing it // make the entry the most recently used by re-pushing it
@ -74,12 +80,12 @@ class LRUCache {
/** /**
* Remove an entry from the cache if exists * Remove an entry from the cache if exists
* *
* @param {string} key - key to remove * @param key - key to remove
* @return {boolean} true if an entry has been removed, false if * @return true if an entry has been removed, false if
* there was no entry with this key in the cache - either if the * there was no entry with this key in the cache - either if the
* key was never added or if it has been evicted from the cache. * key was never added or if it has been evicted from the cache.
*/ */
remove(key) { remove(key: string): boolean {
const entry = this._entryMap[key]; const entry = this._entryMap[key];
if (entry) { if (entry) {
this._removeEntry(entry); this._removeEntry(entry);
@ -91,16 +97,14 @@ class LRUCache {
/** /**
* Get the current number of cached entries * Get the current number of cached entries
* *
* @return {number} current number of cached entries * @return current number of cached entries
*/ */
count() { count(): number {
return this._entryCount; return this._entryCount;
} }
/** /**
* Remove all entries from the cache * Remove all entries from the cache
*
* @return {undefined}
*/ */
clear() { clear() {
this._entryMap = {}; this._entryMap = {};
@ -113,8 +117,7 @@ class LRUCache {
* Push an entry to the front of the LRU list, making it the most * Push an entry to the front of the LRU list, making it the most
* recently accessed * recently accessed
* *
* @param {object} entry - entry to push * @param entry - entry to push
* @return {undefined}
*/ */
_lruPushEntry(entry) { _lruPushEntry(entry) {
/* eslint-disable no-param-reassign */ /* eslint-disable no-param-reassign */
@ -133,8 +136,7 @@ class LRUCache {
/** /**
* Remove an entry from the LRU list * Remove an entry from the LRU list
* *
* @param {object} entry - entry to remove * @param entry - entry to remove
* @return {undefined}
*/ */
_lruRemoveEntry(entry) { _lruRemoveEntry(entry) {
/* eslint-disable no-param-reassign */ /* eslint-disable no-param-reassign */
@ -154,8 +156,7 @@ class LRUCache {
/** /**
* Helper function to remove an existing entry from the cache * Helper function to remove an existing entry from the cache
* *
* @param {object} entry - cache entry to remove * @param entry - cache entry to remove
* @return {undefined}
*/ */
_removeEntry(entry) { _removeEntry(entry) {
this._lruRemoveEntry(entry); this._lruRemoveEntry(entry);
@ -163,5 +164,3 @@ class LRUCache {
this._entryCount -= 1; this._entryCount -= 1;
} }
} }
module.exports = LRUCache;
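As a side note for readers of the LRUCache diff above, here is a hedged usage sketch of the public API its doc comments describe (keys and values are illustrative; the relative import path follows index.js; eviction behaviour is inferred from the class description):
```
import LRUCache from './lib/algos/cache/LRUCache';

// Keep at most 2 entries; the least recently used one is evicted first.
const cache = new LRUCache(2);
cache.add('a', 1);     // false: no previous entry for 'a'
cache.add('b', 2);
cache.get('a');        // 1, and touches 'a', making 'b' the LRU entry
cache.add('c', 3);     // should evict 'b', the least recently used entry
cache.get('b');        // undefined: evicted
cache.remove('a');     // true: the entry existed and was removed
cache.count();         // current number of cached entries
cache.clear();         // empty the cache
```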

View File

@ -1,7 +1,4 @@
'use strict'; // eslint-disable-line strict import { FILTER_SKIP, SKIP_NONE } from './tools';
const { FILTER_SKIP, SKIP_NONE } = require('./tools');
// Use a heuristic to amortize the cost of JSON // Use a heuristic to amortize the cost of JSON
// serialization/deserialization only on largest metadata where the // serialization/deserialization only on largest metadata where the
// potential for size reduction is high, considering the bulk of the // potential for size reduction is high, considering the bulk of the
@ -22,7 +19,12 @@ const TRIM_METADATA_MIN_BLOB_SIZE = 10000;
/** /**
* Base class of listing extensions. * Base class of listing extensions.
*/ */
class Extension { export default class Extension {
parameters;
logger;
res?: any[];
keys: number;
/** /**
* This takes a list of parameters and a logger as the inputs. * This takes a list of parameters and a logger as the inputs.
* Derivatives should have their own format regarding parameters. * Derivatives should have their own format regarding parameters.
@ -51,14 +53,14 @@ class Extension {
* heavy unused fields, or left untouched (depending on size * heavy unused fields, or left untouched (depending on size
* heuristics) * heuristics)
*/ */
trimMetadata(value) { trimMetadata(value: string): string {
let ret = undefined; let ret: any = undefined;
if (value.length >= TRIM_METADATA_MIN_BLOB_SIZE) { if (value.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
try { try {
ret = JSON.parse(value); ret = JSON.parse(value);
delete ret.location; delete ret.location;
ret = JSON.stringify(ret); ret = JSON.stringify(ret);
} catch (e) { } catch (e: any) {
// Prefer returning an unfiltered data rather than // Prefer returning an unfiltered data rather than
// stopping the service in case of parsing failure. // stopping the service in case of parsing failure.
// The risk of this approach is a potential // The risk of this approach is a potential
@ -66,7 +68,8 @@ class Extension {
// used by repd. // used by repd.
this.logger.warn( this.logger.warn(
'Could not parse Object Metadata while listing', 'Could not parse Object Metadata while listing',
{ err: e.toString() }); { err: e.toString() }
);
} }
} }
return ret || value; return ret || value;
@ -96,7 +99,7 @@ class Extension {
* = 0: entry is accepted but not included (skipping) * = 0: entry is accepted but not included (skipping)
* < 0: entry is not accepted, listing should finish * < 0: entry is not accepted, listing should finish
*/ */
filter(entry) { filter(entry): number {
return entry ? FILTER_SKIP : FILTER_SKIP; return entry ? FILTER_SKIP : FILTER_SKIP;
} }
@ -105,20 +108,18 @@ class Extension {
* because it is skipping a range of delimited keys or a range of specific * because it is skipping a range of delimited keys or a range of specific
* version when doing master version listing. * version when doing master version listing.
* *
* @return {string} - the insight: a common prefix or a master key, * @return the insight: a common prefix or a master key,
* or SKIP_NONE if there is no insight * or SKIP_NONE if there is no insight
*/ */
skipping() { skipping(): string | undefined {
return SKIP_NONE; return SKIP_NONE;
} }
/** /**
* Get the listing resutls. Format depends on derivatives' specific logic. * Get the listing resutls. Format depends on derivatives' specific logic.
* @return {Array} - The listed elements * @return The listed elements
*/ */
result() { result() {
return this.res; return this.res;
} }
} }
module.exports.default = Extension;

View File

@ -1,9 +1,12 @@
'use strict'; // eslint-disable-line strict import {
inc,
const { inc, checkLimit, listingParamsMasterKeysV0ToV1, checkLimit,
FILTER_END, FILTER_ACCEPT } = require('./tools'); listingParamsMasterKeysV0ToV1,
FILTER_END,
FILTER_ACCEPT,
} from './tools';
const DEFAULT_MAX_KEYS = 1000; const DEFAULT_MAX_KEYS = 1000;
const VSConst = require('../../versioning/constants').VersioningConstants; import { VersioningConstants as VSConst } from '../../versioning/constants';
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst; const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
function numberDefault(num, defaultNum) { function numberDefault(num, defaultNum) {
@ -14,7 +17,22 @@ function numberDefault(num, defaultNum) {
/** /**
* Class for the MultipartUploads extension * Class for the MultipartUploads extension
*/ */
class MultipartUploads { export class MultipartUploads {
params
vFormat
CommonPrefixes
Uploads
IsTruncated
NextKeyMarker
NextUploadIdMarker
prefixLength
queryPrefixLength
keys
maxKeys
delimiter
splitter
logger
/** /**
* Constructor of the extension * Constructor of the extension
* Init and check parameters * Init and check parameters
@ -39,22 +57,26 @@ class MultipartUploads {
this.splitter = params.splitter; this.splitter = params.splitter;
this.logger = logger; this.logger = logger;
Object.assign(this, { Object.assign(
[BucketVersioningKeyFormat.v0]: { this,
genMDParams: this.genMDParamsV0, {
getObjectKey: this.getObjectKeyV0, [BucketVersioningKeyFormat.v0]: {
}, genMDParams: this.genMDParamsV0,
[BucketVersioningKeyFormat.v1]: { getObjectKey: this.getObjectKeyV0,
genMDParams: this.genMDParamsV1, },
getObjectKey: this.getObjectKeyV1, [BucketVersioningKeyFormat.v1]: {
}, genMDParams: this.genMDParamsV1,
}[this.vFormat]); getObjectKey: this.getObjectKeyV1,
},
}[this.vFormat]
);
} }
genMDParamsV0() { genMDParamsV0() {
const params = {}; const params = {};
if (this.params.keyMarker) { if (this.params.keyMarker) {
params.gt = `overview${this.params.splitter}` + params.gt =
`overview${this.params.splitter}` +
`${this.params.keyMarker}${this.params.splitter}`; `${this.params.keyMarker}${this.params.splitter}`;
if (this.params.uploadIdMarker) { if (this.params.uploadIdMarker) {
params.gt += `${this.params.uploadIdMarker}`; params.gt += `${this.params.uploadIdMarker}`;
@ -147,14 +169,20 @@ class MultipartUploads {
if (this.delimiter) { if (this.delimiter) {
const mpuPrefixSlice = `overview${this.splitter}`.length; const mpuPrefixSlice = `overview${this.splitter}`.length;
const mpuKey = key.slice(mpuPrefixSlice); const mpuKey = key.slice(mpuPrefixSlice);
const commonPrefixIndex = mpuKey.indexOf(this.delimiter, const commonPrefixIndex = mpuKey.indexOf(
this.queryPrefixLength); this.delimiter,
this.queryPrefixLength
);
if (commonPrefixIndex === -1) { if (commonPrefixIndex === -1) {
this.addUpload(value); this.addUpload(value);
} else { } else {
this.addCommonPrefix(mpuKey.substring(0, this.addCommonPrefix(
commonPrefixIndex + this.delimiter.length)); mpuKey.substring(
0,
commonPrefixIndex + this.delimiter.length
)
);
} }
} else { } else {
this.addUpload(value); this.addUpload(value);
@ -182,7 +210,3 @@ class MultipartUploads {
}; };
} }
} }
module.exports = {
MultipartUploads,
};

View File

@ -1,14 +1,17 @@
'use strict'; // eslint-disable-line strict import Extension from './Extension';
import { checkLimit, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } from './tools';
const Extension = require('./Extension').default;
const { checkLimit, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
const DEFAULT_MAX_KEYS = 10000; const DEFAULT_MAX_KEYS = 10000;
/** /**
* Class of an extension doing the simple listing * Class of an extension doing the simple listing
*/ */
class List extends Extension { export class List extends Extension {
maxKeys: number;
filterKey;
filterKeyStartsWith;
res: any[];
/** /**
* Constructor * Constructor
* Set the logger and the res * Set the logger and the res
@ -30,15 +33,17 @@ class List extends Extension {
} }
genMDParams() { genMDParams() {
const params = this.parameters ? { const params = this.parameters
gt: this.parameters.gt, ? {
gte: this.parameters.gte || this.parameters.start, gt: this.parameters.gt,
lt: this.parameters.lt, gte: this.parameters.gte || this.parameters.start,
lte: this.parameters.lte || this.parameters.end, lt: this.parameters.lt,
keys: this.parameters.keys, lte: this.parameters.lte || this.parameters.end,
values: this.parameters.values, keys: this.parameters.keys,
} : {}; values: this.parameters.values,
Object.keys(params).forEach(key => { }
: {};
Object.keys(params).forEach((key) => {
if (params[key] === null || params[key] === undefined) { if (params[key] === null || params[key] === undefined) {
delete params[key]; delete params[key];
} }
@ -53,29 +58,30 @@ class List extends Extension {
* *
* @return {Boolean} Returns true if matches, else false. * @return {Boolean} Returns true if matches, else false.
*/ */
customFilter(value) { customFilter(value: string): boolean {
let _value; let _value: any;
try { try {
_value = JSON.parse(value); _value = JSON.parse(value);
} catch (e) { } catch (e: any) {
// Prefer returning an unfiltered data rather than // Prefer returning an unfiltered data rather than
// stopping the service in case of parsing failure. // stopping the service in case of parsing failure.
// The risk of this approach is a potential // The risk of this approach is a potential
// reproduction of MD-692, where too much memory is // reproduction of MD-692, where too much memory is
// used by repd. // used by repd.
this.logger.warn( this.logger.warn('Could not parse Object Metadata while listing', {
'Could not parse Object Metadata while listing', err: e.toString(),
{ err: e.toString() }); });
return false; return false;
} }
if (_value.customAttributes !== undefined) { if (_value.customAttributes !== undefined) {
for (const key of Object.keys(_value.customAttributes)) { for (const key of Object.keys(_value.customAttributes)) {
if (this.filterKey !== undefined && if (this.filterKey !== undefined && key === this.filterKey) {
key === this.filterKey) {
return true; return true;
} }
if (this.filterKeyStartsWith !== undefined && if (
key.startsWith(this.filterKeyStartsWith)) { this.filterKeyStartsWith !== undefined &&
key.startsWith(this.filterKeyStartsWith)
) {
return true; return true;
} }
} }
@ -90,15 +96,17 @@ class List extends Extension {
* @return {number} - > 0 : continue listing * @return {number} - > 0 : continue listing
* < 0 : listing done * < 0 : listing done
*/ */
filter(elem) { filter(elem): number {
// Check first in case of maxkeys <= 0 // Check first in case of maxkeys <= 0
if (this.keys >= this.maxKeys) { if (this.keys >= this.maxKeys) {
return FILTER_END; return FILTER_END;
} }
if ((this.filterKey !== undefined || if (
this.filterKeyStartsWith !== undefined) && (this.filterKey !== undefined ||
this.filterKeyStartsWith !== undefined) &&
typeof elem === 'object' && typeof elem === 'object' &&
!this.customFilter(elem.value)) { !this.customFilter(elem.value)
) {
return FILTER_SKIP; return FILTER_SKIP;
} }
if (typeof elem === 'object') { if (typeof elem === 'object') {
@ -121,7 +129,3 @@ class List extends Extension {
return this.res; return this.res;
} }
} }
module.exports = {
List,
};

View File

@ -1,9 +1,12 @@
'use strict'; // eslint-disable-line strict import Extension from './Extension';
import {
const Extension = require('./Extension').default; inc,
const { inc, listingParamsMasterKeysV0ToV1, listingParamsMasterKeysV0ToV1,
FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools'); FILTER_END,
const VSConst = require('../../versioning/constants').VersioningConstants; FILTER_ACCEPT,
FILTER_SKIP,
} from './tools';
import { VersioningConstants as VSConst } from '../../versioning/constants';
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst; const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
/** /**
@ -14,7 +17,11 @@ const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
* @param {Number} delimiterIndex - 'folder' index in the path * @param {Number} delimiterIndex - 'folder' index in the path
* @return {String} - CommonPrefix * @return {String} - CommonPrefix
*/ */
function getCommonPrefix(key, delimiter, delimiterIndex) { function getCommonPrefix(
key: string,
delimiter: string,
delimiterIndex: number
): string {
return key.substring(0, delimiterIndex + delimiter.length); return key.substring(0, delimiterIndex + delimiter.length);
} }
@ -30,7 +37,25 @@ function getCommonPrefix(key, delimiter, delimiterIndex) {
* @prop {String|undefined} prefix - prefix per amazon format * @prop {String|undefined} prefix - prefix per amazon format
* @prop {Number} maxKeys - number of keys to list * @prop {Number} maxKeys - number of keys to list
*/ */
class Delimiter extends Extension { export class Delimiter extends Extension {
CommonPrefixes: string[];
Contents: string[];
IsTruncated: boolean;
NextMarker?: string;
keys: number;
delimiter?: string;
prefix?: string;
maxKeys: number;
marker;
startAfter;
continuationToken;
alphabeticalOrder;
vFormat;
NextContinuationToken;
startMarker;
continueMarker;
nextContinueMarker;
/** /**
* Create a new Delimiter instance * Create a new Delimiter instance
* @constructor * @constructor
@ -58,6 +83,7 @@ class Delimiter extends Extension {
constructor(parameters, logger, vFormat) { constructor(parameters, logger, vFormat) {
super(parameters, logger); super(parameters, logger);
// original listing parameters // original listing parameters
this.keys = 0;
this.delimiter = parameters.delimiter; this.delimiter = parameters.delimiter;
this.prefix = parameters.prefix; this.prefix = parameters.prefix;
this.marker = parameters.marker; this.marker = parameters.marker;
@ -65,8 +91,9 @@ class Delimiter extends Extension {
this.startAfter = parameters.startAfter; this.startAfter = parameters.startAfter;
this.continuationToken = parameters.continuationToken; this.continuationToken = parameters.continuationToken;
this.alphabeticalOrder = this.alphabeticalOrder =
typeof parameters.alphabeticalOrder !== 'undefined' ? typeof parameters.alphabeticalOrder !== 'undefined'
parameters.alphabeticalOrder : true; ? parameters.alphabeticalOrder
: true;
this.vFormat = vFormat || BucketVersioningKeyFormat.v0; this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
// results // results
@ -79,36 +106,44 @@ class Delimiter extends Extension {
this.startMarker = parameters.v2 ? 'startAfter' : 'marker'; this.startMarker = parameters.v2 ? 'startAfter' : 'marker';
this.continueMarker = parameters.v2 ? 'continuationToken' : 'marker'; this.continueMarker = parameters.v2 ? 'continuationToken' : 'marker';
this.nextContinueMarker = parameters.v2 ? this.nextContinueMarker = parameters.v2
'NextContinuationToken' : 'NextMarker'; ? 'NextContinuationToken'
: 'NextMarker';
if (this.delimiter !== undefined && if (
this.delimiter !== undefined &&
this[this.nextContinueMarker] !== undefined && this[this.nextContinueMarker] !== undefined &&
this[this.nextContinueMarker].startsWith(this.prefix || '')) { this[this.nextContinueMarker].startsWith(this.prefix || '')
const nextDelimiterIndex = ) {
this[this.nextContinueMarker].indexOf(this.delimiter, const nextDelimiterIndex = this[this.nextContinueMarker].indexOf(
this.prefix ? this.prefix.length : 0); this.delimiter,
this[this.nextContinueMarker] = this.prefix ? this.prefix.length : 0
this[this.nextContinueMarker].slice(0, nextDelimiterIndex + );
this.delimiter.length); this[this.nextContinueMarker] = this[this.nextContinueMarker].slice(
0,
nextDelimiterIndex + this.delimiter.length
);
} }
Object.assign(this, { Object.assign(
[BucketVersioningKeyFormat.v0]: { this,
genMDParams: this.genMDParamsV0, {
getObjectKey: this.getObjectKeyV0, [BucketVersioningKeyFormat.v0]: {
skipping: this.skippingV0, genMDParams: this.genMDParamsV0,
}, getObjectKey: this.getObjectKeyV0,
[BucketVersioningKeyFormat.v1]: { skipping: this.skippingV0,
genMDParams: this.genMDParamsV1, },
getObjectKey: this.getObjectKeyV1, [BucketVersioningKeyFormat.v1]: {
skipping: this.skippingV1, genMDParams: this.genMDParamsV1,
}, getObjectKey: this.getObjectKeyV1,
}[this.vFormat]); skipping: this.skippingV1,
},
}[this.vFormat]
);
} }
genMDParamsV0() { genMDParamsV0() {
const params = {}; const params: { gte?: string; lt?: string; gt?: string } = {};
if (this.prefix) { if (this.prefix) {
params.gte = this.prefix; params.gte = this.prefix;
params.lt = inc(this.prefix); params.lt = inc(this.prefix);
@ -134,7 +169,7 @@ class Delimiter extends Extension {
* final state of the result if it is the case * final state of the result if it is the case
* @return {Boolean} - indicates if the iteration has to stop * @return {Boolean} - indicates if the iteration has to stop
*/ */
_reachedMaxKeys() { _reachedMaxKeys(): boolean {
if (this.keys >= this.maxKeys) { if (this.keys >= this.maxKeys) {
// In cases of maxKeys <= 0 -> IsTruncated = false // In cases of maxKeys <= 0 -> IsTruncated = false
this.IsTruncated = this.maxKeys > 0; this.IsTruncated = this.maxKeys > 0;
@ -151,7 +186,7 @@ class Delimiter extends Extension {
* @param {String} value - The value of the key * @param {String} value - The value of the key
* @return {number} - indicates if iteration should continue * @return {number} - indicates if iteration should continue
*/ */
addContents(key, value) { addContents(key: string, value: string): number {
if (this._reachedMaxKeys()) { if (this._reachedMaxKeys()) {
return FILTER_END; return FILTER_END;
} }
@ -161,11 +196,11 @@ class Delimiter extends Extension {
return FILTER_ACCEPT; return FILTER_ACCEPT;
} }
getObjectKeyV0(obj) { getObjectKeyV0(obj: { key: string }) {
return obj.key; return obj.key;
} }
getObjectKeyV1(obj) { getObjectKeyV1(obj: { key: string }) {
return obj.key.slice(DbPrefixes.Master.length); return obj.key.slice(DbPrefixes.Master.length);
} }
@ -180,13 +215,15 @@ class Delimiter extends Extension {
* @param {String} obj.value - The value of the element * @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue * @return {number} - indicates if iteration should continue
*/ */
filter(obj) { filter(obj: { key: string; value: string }): number {
const key = this.getObjectKey(obj); const key = this.getObjectKey(obj);
const value = obj.value; const value = obj.value;
if ((this.prefix && !key.startsWith(this.prefix)) if (
|| (this.alphabeticalOrder (this.prefix && !key.startsWith(this.prefix)) ||
&& typeof this[this.nextContinueMarker] === 'string' (this.alphabeticalOrder &&
&& key <= this[this.nextContinueMarker])) { typeof this[this.nextContinueMarker] === 'string' &&
key <= this[this.nextContinueMarker])
) {
return FILTER_SKIP; return FILTER_SKIP;
} }
if (this.delimiter) { if (this.delimiter) {
@ -206,10 +243,12 @@ class Delimiter extends Extension {
* @param {Number} index - after prefix starting point * @param {Number} index - after prefix starting point
* @return {Boolean} - indicates if iteration should continue * @return {Boolean} - indicates if iteration should continue
*/ */
addCommonPrefix(key, index) { addCommonPrefix(key: string, index: number): boolean {
const commonPrefix = getCommonPrefix(key, this.delimiter, index); const commonPrefix = getCommonPrefix(key, this.delimiter, index);
if (this.CommonPrefixes.indexOf(commonPrefix) === -1 if (
&& this[this.nextContinueMarker] !== commonPrefix) { this.CommonPrefixes.indexOf(commonPrefix) === -1 &&
this[this.nextContinueMarker] !== commonPrefix
) {
if (this._reachedMaxKeys()) { if (this._reachedMaxKeys()) {
return FILTER_END; return FILTER_END;
} }
@ -228,7 +267,7 @@ class Delimiter extends Extension {
* @return {string} - the present range (NextMarker) if repd believes * @return {string} - the present range (NextMarker) if repd believes
* that it's enough and should move on * that it's enough and should move on
*/ */
skippingV0() { skippingV0(): string {
return this[this.nextContinueMarker]; return this[this.nextContinueMarker];
} }
@ -239,7 +278,7 @@ class Delimiter extends Extension {
* @return {string} - the present range (NextMarker) if repd believes * @return {string} - the present range (NextMarker) if repd believes
* that it's enough and should move on * that it's enough and should move on
*/ */
skippingV1() { skippingV1(): string {
return DbPrefixes.Master + this[this.nextContinueMarker]; return DbPrefixes.Master + this[this.nextContinueMarker];
} }
@ -261,14 +300,17 @@ class Delimiter extends Extension {
Delimiter: this.delimiter, Delimiter: this.delimiter,
}; };
if (this.parameters.v2) { if (this.parameters.v2) {
//
result.NextContinuationToken = this.IsTruncated result.NextContinuationToken = this.IsTruncated
? this.NextContinuationToken : undefined; ? this.NextContinuationToken
: undefined;
} else { } else {
result.NextMarker = (this.IsTruncated && this.delimiter) //
? this.NextMarker : undefined; result.NextMarker =
this.IsTruncated && this.delimiter
? this.NextMarker
: undefined;
} }
return result; return result;
} }
} }
module.exports = { Delimiter };

View File

@ -1,10 +1,8 @@
'use strict'; // eslint-disable-line strict import { Delimiter } from './delimiter';
import { Version } from '../../versioning/Version';
const Delimiter = require('./delimiter').Delimiter; import { VersioningConstants as VSConst } from '../../versioning/constants';
const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { BucketVersioningKeyFormat } = VSConst; const { BucketVersioningKeyFormat } = VSConst;
const { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools'); import { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } from './tools';
const VID_SEP = VSConst.VersionId.Separator; const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes } = VSConst; const { DbPrefixes } = VSConst;
@ -13,7 +11,11 @@ const { DbPrefixes } = VSConst;
* Handle object listing with parameters. This extends the base class Delimiter * Handle object listing with parameters. This extends the base class Delimiter
* to return the raw master versions of existing objects. * to return the raw master versions of existing objects.
*/ */
class DelimiterMaster extends Delimiter { export class DelimiterMaster extends Delimiter {
prvKey;
prvPHDKey;
inReplayPrefix;
/** /**
* Delimiter listing of master versions. * Delimiter listing of master versions.
* @param {Object} parameters - listing parameters * @param {Object} parameters - listing parameters
@ -34,16 +36,19 @@ class DelimiterMaster extends Delimiter {
this.prvPHDKey = undefined; this.prvPHDKey = undefined;
this.inReplayPrefix = false; this.inReplayPrefix = false;
Object.assign(this, { Object.assign(
[BucketVersioningKeyFormat.v0]: { this,
filter: this.filterV0, {
skipping: this.skippingV0, [BucketVersioningKeyFormat.v0]: {
}, filter: this.filterV0,
[BucketVersioningKeyFormat.v1]: { skipping: this.skippingV0,
filter: this.filterV1, },
skipping: this.skippingV1, [BucketVersioningKeyFormat.v1]: {
}, filter: this.filterV1,
}[this.vFormat]); skipping: this.skippingV1,
},
}[this.vFormat]
);
} }
/** /**
@ -58,7 +63,7 @@ class DelimiterMaster extends Delimiter {
* @param {String} obj.value - The value of the element * @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue * @return {number} - indicates if iteration should continue
*/ */
filterV0(obj) { filterV0(obj: { key: string; value: string }): number {
let key = obj.key; let key = obj.key;
const value = obj.value; const value = obj.value;
@ -70,9 +75,11 @@ class DelimiterMaster extends Delimiter {
/* Skip keys not starting with the prefix or not alphabetically /* Skip keys not starting with the prefix or not alphabetically
* ordered. */ * ordered. */
if ((this.prefix && !key.startsWith(this.prefix)) if (
|| (typeof this[this.nextContinueMarker] === 'string' && (this.prefix && !key.startsWith(this.prefix)) ||
key <= this[this.nextContinueMarker])) { (typeof this[this.nextContinueMarker] === 'string' &&
key <= this[this.nextContinueMarker])
) {
return FILTER_SKIP; return FILTER_SKIP;
} }
@ -95,9 +102,12 @@ class DelimiterMaster extends Delimiter {
* NextMarker to the common prefix instead of the whole key * NextMarker to the common prefix instead of the whole key
* value. (TODO: remove this test once ZENKO-1048 is fixed) * value. (TODO: remove this test once ZENKO-1048 is fixed)
* */ * */
if (key === this.prvKey || key === this[this.nextContinueMarker] || if (
key === this.prvKey ||
key === this[this.nextContinueMarker] ||
(this.delimiter && (this.delimiter &&
key.startsWith(this[this.nextContinueMarker]))) { key.startsWith(this[this.nextContinueMarker]))
) {
/* master version already filtered */ /* master version already filtered */
return FILTER_SKIP; return FILTER_SKIP;
} }
@ -155,7 +165,7 @@ class DelimiterMaster extends Delimiter {
* @param {String} obj.value - The value of the element * @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue * @return {number} - indicates if iteration should continue
*/ */
filterV1(obj) { filterV1(obj: { key: string; value: string }): number {
// Filtering master keys in v1 is simply listing the master // Filtering master keys in v1 is simply listing the master
// keys, as the state of version keys do not change the // keys, as the state of version keys do not change the
// result, so we can use Delimiter method directly. // result, so we can use Delimiter method directly.
@ -167,8 +177,9 @@ class DelimiterMaster extends Delimiter {
// next marker or next continuation token: // next marker or next continuation token:
// - foo/ : skipping foo/ // - foo/ : skipping foo/
// - foo : skipping foo. // - foo : skipping foo.
const index = this[this.nextContinueMarker]. const index = this[this.nextContinueMarker].lastIndexOf(
lastIndexOf(this.delimiter); this.delimiter
);
if (index === this[this.nextContinueMarker].length - 1) { if (index === this[this.nextContinueMarker].length - 1) {
return this[this.nextContinueMarker]; return this[this.nextContinueMarker];
} }
@ -192,5 +203,3 @@ class DelimiterMaster extends Delimiter {
return DbPrefixes.Master + skipTo; return DbPrefixes.Master + skipTo;
} }
} }
module.exports = { DelimiterMaster };

View File

@ -1,10 +1,13 @@
'use strict'; // eslint-disable-line strict import { Delimiter } from './delimiter';
import { Version } from '../../versioning/Version';
const Delimiter = require('./delimiter').Delimiter; import { VersioningConstants as VSConst } from '../../versioning/constants';
const Version = require('../../versioning/Version').Version; import {
const VSConst = require('../../versioning/constants').VersioningConstants; inc,
const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = FILTER_END,
require('./tools'); FILTER_ACCEPT,
FILTER_SKIP,
SKIP_NONE,
} from './tools';
const VID_SEP = VSConst.VersionId.Separator; const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst; const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
@ -21,7 +24,16 @@ const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
* @prop {String|undefined} prefix - prefix per amazon format * @prop {String|undefined} prefix - prefix per amazon format
* @prop {Number} maxKeys - number of keys to list * @prop {Number} maxKeys - number of keys to list
*/ */
class DelimiterVersions extends Delimiter { export class DelimiterVersions extends Delimiter {
CommonPrefixes: string[];
Contents: string[];
IsTruncated: boolean;
NextMarker?: string;
keys: number;
delimiter?: string;
prefix?: string;
maxKeys: number;
constructor(parameters, logger, vFormat) { constructor(parameters, logger, vFormat) {
super(parameters, logger, vFormat); super(parameters, logger, vFormat);
// specific to version listing // specific to version listing
@ -35,18 +47,21 @@ class DelimiterVersions extends Delimiter {
this.NextVersionIdMarker = undefined; this.NextVersionIdMarker = undefined;
this.inReplayPrefix = false; this.inReplayPrefix = false;
Object.assign(this, { Object.assign(
[BucketVersioningKeyFormat.v0]: { this,
genMDParams: this.genMDParamsV0, {
filter: this.filterV0, [BucketVersioningKeyFormat.v0]: {
skipping: this.skippingV0, genMDParams: this.genMDParamsV0,
}, filter: this.filterV0,
[BucketVersioningKeyFormat.v1]: { skipping: this.skippingV0,
genMDParams: this.genMDParamsV1, },
filter: this.filterV1, [BucketVersioningKeyFormat.v1]: {
skipping: this.skippingV1, genMDParams: this.genMDParamsV1,
}, filter: this.filterV1,
}[this.vFormat]); skipping: this.skippingV1,
},
}[this.vFormat]
);
} }
genMDParamsV0() { genMDParamsV0() {
@ -63,9 +78,10 @@ class DelimiterVersions extends Delimiter {
if (this.parameters.versionIdMarker) { if (this.parameters.versionIdMarker) {
// versionIdMarker should always come with keyMarker // versionIdMarker should always come with keyMarker
// but may not be the other way around // but may not be the other way around
params.gt = this.parameters.keyMarker params.gt =
+ VID_SEP this.parameters.keyMarker +
+ this.parameters.versionIdMarker; VID_SEP +
this.parameters.versionIdMarker;
} else { } else {
params.gt = inc(this.parameters.keyMarker + VID_SEP); params.gt = inc(this.parameters.keyMarker + VID_SEP);
} }
@ -89,20 +105,27 @@ class DelimiterVersions extends Delimiter {
params[1].lt = inc(DbPrefixes.Version); // stop after the last version key params[1].lt = inc(DbPrefixes.Version); // stop after the last version key
} }
if (this.parameters.keyMarker) { if (this.parameters.keyMarker) {
if (params[1].gte <= DbPrefixes.Version + this.parameters.keyMarker) { if (
params[1].gte <=
DbPrefixes.Version + this.parameters.keyMarker
) {
delete params[0].gte; delete params[0].gte;
delete params[1].gte; delete params[1].gte;
params[0].gt = DbPrefixes.Master + inc(this.parameters.keyMarker + VID_SEP); params[0].gt =
DbPrefixes.Master +
inc(this.parameters.keyMarker + VID_SEP);
if (this.parameters.versionIdMarker) { if (this.parameters.versionIdMarker) {
// versionIdMarker should always come with keyMarker // versionIdMarker should always come with keyMarker
// but may not be the other way around // but may not be the other way around
params[1].gt = DbPrefixes.Version params[1].gt =
+ this.parameters.keyMarker DbPrefixes.Version +
+ VID_SEP this.parameters.keyMarker +
+ this.parameters.versionIdMarker; VID_SEP +
this.parameters.versionIdMarker;
} else { } else {
params[1].gt = DbPrefixes.Version params[1].gt =
+ inc(this.parameters.keyMarker + VID_SEP); DbPrefixes.Version +
inc(this.parameters.keyMarker + VID_SEP);
} }
} }
} }
@ -120,7 +143,7 @@ class DelimiterVersions extends Delimiter {
* * -1 if master key < version key * * -1 if master key < version key
* * 1 if master key > version key * * 1 if master key > version key
*/ */
compareObjects(masterObj, versionObj) { compareObjects(masterObj, versionObj): number {
const masterKey = masterObj.key.slice(DbPrefixes.Master.length); const masterKey = masterObj.key.slice(DbPrefixes.Master.length);
const versionKey = versionObj.key.slice(DbPrefixes.Version.length); const versionKey = versionObj.key.slice(DbPrefixes.Version.length);
return masterKey < versionKey ? -1 : 1; return masterKey < versionKey ? -1 : 1;
@ -136,7 +159,11 @@ class DelimiterVersions extends Delimiter {
* @param {String} obj.value - The value of the key * @param {String} obj.value - The value of the key
* @return {Boolean} - indicates if iteration should continue * @return {Boolean} - indicates if iteration should continue
*/ */
addContents(obj) { addContents(obj: {
key: string;
versionId: string;
value: string;
}): boolean {
if (this._reachedMaxKeys()) { if (this._reachedMaxKeys()) {
return FILTER_END; return FILTER_END;
} }
@ -163,7 +190,7 @@ class DelimiterVersions extends Delimiter {
* @param {String} obj.value - The value of the element * @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue * @return {number} - indicates if iteration should continue
*/ */
filterV0(obj) { filterV0(obj: { key: string; value: string }): number {
if (obj.key.startsWith(DbPrefixes.Replay)) { if (obj.key.startsWith(DbPrefixes.Replay)) {
this.inReplayPrefix = true; this.inReplayPrefix = true;
return FILTER_SKIP; return FILTER_SKIP;
@ -189,12 +216,14 @@ class DelimiterVersions extends Delimiter {
* @param {String} obj.value - The value of the element * @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue * @return {number} - indicates if iteration should continue
*/ */
filterV1(obj) { filterV1(obj: { key: string; value: string }): number {
// this function receives both M and V keys, but their prefix // this function receives both M and V keys, but their prefix
// length is the same so we can remove their prefix without // length is the same so we can remove their prefix without
// looking at the type of key // looking at the type of key
return this.filterCommon(obj.key.slice(DbPrefixes.Master.length), return this.filterCommon(
obj.value); obj.key.slice(DbPrefixes.Master.length),
obj.value
);
} }
filterCommon(key, value) { filterCommon(key, value) {
@ -207,14 +236,16 @@ class DelimiterVersions extends Delimiter {
if (versionIdIndex < 0) { if (versionIdIndex < 0) {
nonversionedKey = key; nonversionedKey = key;
this.masterKey = key; this.masterKey = key;
this.masterVersionId = this.masterVersionId = Version.from(value).getVersionId() || 'null';
Version.from(value).getVersionId() || 'null';
versionId = this.masterVersionId; versionId = this.masterVersionId;
} else { } else {
nonversionedKey = key.slice(0, versionIdIndex); nonversionedKey = key.slice(0, versionIdIndex);
versionId = key.slice(versionIdIndex + 1); versionId = key.slice(versionIdIndex + 1);
// skip a version key if it is the master version // skip a version key if it is the master version
if (this.masterKey === nonversionedKey && this.masterVersionId === versionId) { if (
this.masterKey === nonversionedKey &&
this.masterVersionId === versionId
) {
return FILTER_SKIP; return FILTER_SKIP;
} }
this.masterKey = undefined; this.masterKey = undefined;
@ -222,7 +253,10 @@ class DelimiterVersions extends Delimiter {
} }
if (this.delimiter) { if (this.delimiter) {
const baseIndex = this.prefix ? this.prefix.length : 0; const baseIndex = this.prefix ? this.prefix.length : 0;
const delimiterIndex = nonversionedKey.indexOf(this.delimiter, baseIndex); const delimiterIndex = nonversionedKey.indexOf(
this.delimiter,
baseIndex
);
if (delimiterIndex >= 0) { if (delimiterIndex >= 0) {
return this.addCommonPrefix(nonversionedKey, delimiterIndex); return this.addCommonPrefix(nonversionedKey, delimiterIndex);
} }
@ -249,8 +283,7 @@ class DelimiterVersions extends Delimiter {
return SKIP_NONE; return SKIP_NONE;
} }
// skip to the same object key in both M and V range listings // skip to the same object key in both M and V range listings
return [DbPrefixes.Master + skipV0, return [DbPrefixes.Master + skipV0, DbPrefixes.Version + skipV0];
DbPrefixes.Version + skipV0];
} }
/** /**
@ -269,11 +302,10 @@ class DelimiterVersions extends Delimiter {
Versions: this.Contents, Versions: this.Contents,
IsTruncated: this.IsTruncated, IsTruncated: this.IsTruncated,
NextKeyMarker: this.IsTruncated ? this.NextMarker : undefined, NextKeyMarker: this.IsTruncated ? this.NextMarker : undefined,
NextVersionIdMarker: this.IsTruncated ? NextVersionIdMarker: this.IsTruncated
this.NextVersionIdMarker : undefined, ? this.NextVersionIdMarker
: undefined,
Delimiter: this.delimiter, Delimiter: this.delimiter,
}; };
} }
} }
module.exports = { DelimiterVersions };


@ -1,21 +1,25 @@
const assert = require('assert'); import assert from 'assert';
import { FILTER_END, FILTER_SKIP, SKIP_NONE } from './tools';
const { FILTER_END, FILTER_SKIP, SKIP_NONE } = require('./tools');
const MAX_STREAK_LENGTH = 100; const MAX_STREAK_LENGTH = 100;
/** /**
* Handle the filtering and the skip mechanism of a listing result. * Handle the filtering and the skip mechanism of a listing result.
*/ */
class Skip { export class Skip {
extension;
gteParams;
listingEndCb;
skipRangeCb;
streakLength;
/** /**
* @param {Object} params - skip parameters * @param {Object} params - skip parameters
* @param {Object} params.extension - delimiter extension used (required) * @param {Object} params.extension - delimiter extension used (required)
* @param {String} params.gte - current range gte (greater than or * @param {String} params.gte - current range gte (greater than or
* equal) used by the client code * equal) used by the client code
*/ */
constructor(params) { constructor(params: { extension: any; gte: string }) {
assert(params.extension); assert(params.extension);
this.extension = params.extension; this.extension = params.extension;
@ -47,7 +51,7 @@ class Skip {
* This function calls the listing end or the skip range callbacks if * This function calls the listing end or the skip range callbacks if
* needed. * needed.
*/ */
filter(entry) { filter(entry): void {
assert(this.listingEndCb); assert(this.listingEndCb);
assert(this.skipRangeCb); assert(this.skipRangeCb);
@ -56,8 +60,10 @@ class Skip {
if (filteringResult === FILTER_END) { if (filteringResult === FILTER_END) {
this.listingEndCb(); this.listingEndCb();
} else if (filteringResult === FILTER_SKIP } else if (
&& skippingRange !== SKIP_NONE) { filteringResult === FILTER_SKIP &&
skippingRange !== SKIP_NONE
) {
if (++this.streakLength >= MAX_STREAK_LENGTH) { if (++this.streakLength >= MAX_STREAK_LENGTH) {
const newRange = this._inc(skippingRange); const newRange = this._inc(skippingRange);
@ -73,7 +79,7 @@ class Skip {
} }
} }
_inc(str) { _inc(str: string) {
if (!str) { if (!str) {
return str; return str;
} }
@ -83,6 +89,3 @@ class Skip {
return `${str.slice(0, str.length - 1)}${lastCharNewValue}`; return `${str.slice(0, str.length - 1)}${lastCharNewValue}`;
} }
} }
module.exports = Skip;


@ -1,10 +1,11 @@
const { DbPrefixes } = require('../../versioning/constants').VersioningConstants; import { VersioningConstants as VSConst } from '../../versioning/constants';
const { DbPrefixes } = VSConst
// constants for extensions // constants for extensions
const SKIP_NONE = undefined; // to be inline with the values of NextMarker export const SKIP_NONE = undefined; // to be inline with the values of NextMarker
const FILTER_ACCEPT = 1; export const FILTER_ACCEPT = 1;
const FILTER_SKIP = 0; export const FILTER_SKIP = 0;
const FILTER_END = -1; export const FILTER_END = -1;
/** /**
* This function check if number is valid * This function check if number is valid
@ -15,8 +16,8 @@ const FILTER_END = -1;
* @param {Number} limit - The limit to respect * @param {Number} limit - The limit to respect
* @return {Number} - The parsed number || limit * @return {Number} - The parsed number || limit
*/ */
function checkLimit(number, limit) { export function checkLimit(number: number, limit: number): number {
const parsed = Number.parseInt(number, 10); const parsed = Number.parseInt(number, 10)
const valid = !Number.isNaN(parsed) && (!limit || parsed <= limit); const valid = !Number.isNaN(parsed) && (!limit || parsed <= limit);
return valid ? parsed : limit; return valid ? parsed : limit;
} }
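A short usage sketch of `checkLimit` with made-up values (the limit of 1000 below is illustrative, not a value taken from this diff):

```ts
import { checkLimit } from './tools';

// Cap a client-supplied count at a server-side maximum.
console.log(checkLimit(500, 1000));  // 500: parses fine and is under the limit
console.log(checkLimit(5000, 1000)); // 1000: over the limit, fall back to it
console.log(checkLimit(NaN, 1000));  // 1000: unparsable input, fall back to it
```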
@ -28,7 +29,7 @@ function checkLimit(number, limit) {
* @return {string} - the incremented string * @return {string} - the incremented string
* or the input if it is not valid * or the input if it is not valid
*/ */
function inc(str) { export function inc(str: string): string {
return str ? (str.slice(0, str.length - 1) + return str ? (str.slice(0, str.length - 1) +
String.fromCharCode(str.charCodeAt(str.length - 1) + 1)) : str; String.fromCharCode(str.charCodeAt(str.length - 1) + 1)) : str;
} }
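For example (a minimal sketch, the key is made up):

```ts
import { inc } from './tools';

// The last character is bumped by one code point, so the result sorts just
// after every key that starts with the original string.
console.log(inc('foo/')); // 'foo0' ('/' + 1 === '0')
console.log(inc(''));     // ''     (falsy input is returned unchanged)
```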
@ -40,7 +41,7 @@ function inc(str) {
* @param {object} v0params - listing parameters for v0 format * @param {object} v0params - listing parameters for v0 format
* @return {object} - listing parameters for v1 format * @return {object} - listing parameters for v1 format
*/ */
function listingParamsMasterKeysV0ToV1(v0params) { export function listingParamsMasterKeysV0ToV1(v0params: any): any {
const v1params = Object.assign({}, v0params); const v1params = Object.assign({}, v0params);
if (v0params.gt !== undefined) { if (v0params.gt !== undefined) {
v1params.gt = `${DbPrefixes.Master}${v0params.gt}`; v1params.gt = `${DbPrefixes.Master}${v0params.gt}`;
@ -58,13 +59,3 @@ function listingParamsMasterKeysV0ToV1(v0params) {
} }
return v1params; return v1params;
} }
module.exports = {
checkLimit,
inc,
listingParamsMasterKeysV0ToV1,
SKIP_NONE,
FILTER_END,
FILTER_SKIP,
FILTER_ACCEPT,
};
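A quick sketch of the transform with illustrative listing parameters (only the `gt` branch appears in this hunk; the expected output comments assume the other range keys are prefixed the same way):

```ts
import { listingParamsMasterKeysV0ToV1 } from './tools';

// Range keys gain the DbPrefixes.Master prefix; everything else is copied
// over unchanged by Object.assign.
const v1params = listingParamsMasterKeysV0ToV1({ gt: 'photos/1000', limit: 100 });
console.log(v1params.gt);    // DbPrefixes.Master + 'photos/1000'
console.log(v1params.limit); // 100
```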


@ -1,4 +1,4 @@
function indexOf(arr, value) { export function indexOf<T>(arr: T[], value: T) {
if (!arr.length) { if (!arr.length) {
return -1; return -1;
} }
@ -22,10 +22,10 @@ function indexOf(arr, value) {
return -1; return -1;
} }
function indexAtOrBelow(arr, value) { export function indexAtOrBelow<T>(arr: T[], value: T) {
let i; let i: number;
let lo; let lo: number;
let hi; let hi: number;
if (!arr.length || arr[0] > value) { if (!arr.length || arr[0] > value) {
return -1; return -1;
@ -52,7 +52,7 @@ function indexAtOrBelow(arr, value) {
/* /*
* perform symmetric diff in O(m + n) * perform symmetric diff in O(m + n)
*/ */
function symDiff(k1, k2, v1, v2, cb) { export function symDiff(k1, k2, v1, v2, cb) {
let i = 0; let i = 0;
let j = 0; let j = 0;
const n = k1.length; const n = k1.length;
@ -79,9 +79,3 @@ function symDiff(k1, k2, v1, v2, cb) {
j++; j++;
} }
} }
module.exports = {
indexOf,
indexAtOrBelow,
symDiff,
};
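A small usage sketch of the two binary-search helpers (array contents are made up; both assume the array is already sorted):

```ts
import * as ArrayUtils from './ArrayUtils';

const keys = [10, 20, 30];

console.log(ArrayUtils.indexOf(keys, 20));        // 1: exact match
console.log(ArrayUtils.indexOf(keys, 25));        // -1: not present
console.log(ArrayUtils.indexAtOrBelow(keys, 25)); // 1: largest index with keys[i] <= 25
console.log(ArrayUtils.indexAtOrBelow(keys, 5));  // -1: every element is above the value
```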


@ -1,14 +1,12 @@
const ArrayUtils = require('./ArrayUtils'); import * as ArrayUtils from './ArrayUtils';
class SortedSet { export default class SortedSet<Key, Value> {
keys: Key[];
values: Value[];
constructor(obj) { constructor(obj?: { keys: Key[]; values: Value[] }) {
if (obj) { this.keys = obj?.keys ?? [];
this.keys = obj.keys; this.values = obj?.values ?? [];
this.values = obj.values;
} else {
this.clear();
}
} }
clear() { clear() {
@ -20,7 +18,7 @@ class SortedSet {
return this.keys.length; return this.keys.length;
} }
set(key, value) { set(key: Key, value: Value) {
const index = ArrayUtils.indexAtOrBelow(this.keys, key); const index = ArrayUtils.indexAtOrBelow(this.keys, key);
if (this.keys[index] === key) { if (this.keys[index] === key) {
this.values[index] = value; this.values[index] = value;
@ -30,17 +28,17 @@ class SortedSet {
this.values.splice(index + 1, 0, value); this.values.splice(index + 1, 0, value);
} }
isSet(key) { isSet(key: Key) {
const index = ArrayUtils.indexOf(this.keys, key); const index = ArrayUtils.indexOf(this.keys, key);
return index >= 0; return index >= 0;
} }
get(key) { get(key: Key) {
const index = ArrayUtils.indexOf(this.keys, key); const index = ArrayUtils.indexOf(this.keys, key);
return index >= 0 ? this.values[index] : undefined; return index >= 0 ? this.values[index] : undefined;
} }
del(key) { del(key: Key) {
const index = ArrayUtils.indexOf(this.keys, key); const index = ArrayUtils.indexOf(this.keys, key);
if (index >= 0) { if (index >= 0) {
this.keys.splice(index, 1); this.keys.splice(index, 1);
@ -48,5 +46,3 @@ class SortedSet {
} }
} }
} }
module.exports = SortedSet;
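With the new optional constructor argument, a `SortedSet` can be built empty and filled incrementally; a minimal sketch (keys and values are made up):

```ts
import SortedSet from './SortedSet';

const set = new SortedSet<string, number>();
set.set('b', 2);
set.set('a', 1);   // keys stay sorted internally: ['a', 'b']
set.set('b', 20);  // existing key: the value is overwritten, no duplicate key

console.log(set.get('b'));   // 20
console.log(set.isSet('c')); // false
set.del('a');
console.log(set.get('a'));   // undefined
```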


@ -1,7 +1,17 @@
const stream = require('stream'); import stream from 'stream';
class MergeStream extends stream.Readable { export default class MergeStream extends stream.Readable {
constructor(stream1, stream2, compare) { _compare: (a: any, b: any) => number;
_streams: [stream.Readable, stream.Readable];
_peekItems: [undefined | null, undefined | null];
_streamEof: [boolean, boolean];
_streamToResume: stream.Readable | null;
constructor(
stream1: stream.Readable,
stream2: stream.Readable,
compare: (a: any, b: any) => number
) {
super({ objectMode: true }); super({ objectMode: true });
this._compare = compare; this._compare = compare;
@ -16,13 +26,13 @@ class MergeStream extends stream.Readable {
this._streamEof = [false, false]; this._streamEof = [false, false];
this._streamToResume = null; this._streamToResume = null;
stream1.on('data', item => this._onItem(stream1, item, 0, 1)); stream1.on('data', (item) => this._onItem(stream1, item, 0, 1));
stream1.once('end', () => this._onEnd(stream1, 0, 1)); stream1.once('end', () => this._onEnd(stream1, 0, 1));
stream1.once('error', err => this._onError(stream1, err, 0, 1)); stream1.once('error', (err) => this._onError(stream1, err, 0, 1));
stream2.on('data', item => this._onItem(stream2, item, 1, 0)); stream2.on('data', (item) => this._onItem(stream2, item, 1, 0));
stream2.once('end', () => this._onEnd(stream2, 1, 0)); stream2.once('end', () => this._onEnd(stream2, 1, 0));
stream2.once('error', err => this._onError(stream2, err, 1, 0)); stream2.once('error', (err) => this._onError(stream2, err, 1, 0));
} }
_read() { _read() {
@ -41,7 +51,7 @@ class MergeStream extends stream.Readable {
callback(); callback();
} }
_onItem(myStream, myItem, myIndex, otherIndex) { _onItem(myStream: stream.Readable, myItem, myIndex, otherIndex) {
this._peekItems[myIndex] = myItem; this._peekItems[myIndex] = myItem;
const otherItem = this._peekItems[otherIndex]; const otherItem = this._peekItems[otherIndex];
if (otherItem === undefined) { if (otherItem === undefined) {
@ -69,7 +79,7 @@ class MergeStream extends stream.Readable {
return undefined; return undefined;
} }
_onEnd(myStream, myIndex, otherIndex) { _onEnd(myStream: stream.Readable, myIndex, otherIndex) {
this._streamEof[myIndex] = true; this._streamEof[myIndex] = true;
if (this._peekItems[myIndex] === undefined) { if (this._peekItems[myIndex] === undefined) {
this._peekItems[myIndex] = null; this._peekItems[myIndex] = null;
@ -94,7 +104,7 @@ class MergeStream extends stream.Readable {
return otherStream.resume(); return otherStream.resume();
} }
_onError(myStream, err, myIndex, otherIndex) { _onError(myStream: stream.Readable, err, myIndex, otherIndex) {
myStream.destroy(); myStream.destroy();
if (this._streams[otherIndex]) { if (this._streams[otherIndex]) {
this._streams[otherIndex].destroy(); this._streams[otherIndex].destroy();
@ -102,5 +112,3 @@ class MergeStream extends stream.Readable {
this.emit('error', err); this.emit('error', err);
} }
} }
module.exports = MergeStream;
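A usage sketch: merging two already-sorted object-mode streams into one sorted stream (the input items and compare function below are illustrative, not taken from this diff):

```ts
import stream from 'stream';
import MergeStream from './MergeStream';

// Two pre-sorted object-mode sources, e.g. an M-range and a V-range listing.
const s1 = stream.Readable.from([{ key: 'a' }, { key: 'c' }]);
const s2 = stream.Readable.from([{ key: 'b' }, { key: 'd' }]);

const merged = new MergeStream(s1, s2, (a, b) =>
    a.key < b.key ? -1 : a.key > b.key ? 1 : 0
);
merged.on('data', (item) => console.log(item.key)); // a, b, c, d
merged.on('error', (err) => console.error(err));
```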


@ -1,6 +1,4 @@
'use strict'; // eslint-disable-line strict import * as constants from '../constants';
const constants = require('../constants');
/** /**
* Class containing requester's information received from Vault * Class containing requester's information received from Vault
@ -9,7 +7,14 @@ const constants = require('../constants');
* @return {AuthInfo} an AuthInfo instance * @return {AuthInfo} an AuthInfo instance
*/ */
class AuthInfo { export default class AuthInfo {
arn
canonicalID
shortid
email
accountDisplayName
IAMdisplayName
constructor(objectFromVault) { constructor(objectFromVault) {
// amazon resource name for IAM user (if applicable) // amazon resource name for IAM user (if applicable)
this.arn = objectFromVault.arn; this.arn = objectFromVault.arn;
@ -50,13 +55,12 @@ class AuthInfo {
return this.canonicalID === constants.publicId; return this.canonicalID === constants.publicId;
} }
isRequesterAServiceAccount() { isRequesterAServiceAccount() {
return this.canonicalID.startsWith( return this.canonicalID.startsWith(`${constants.zenkoServiceAccount}/`);
`${constants.zenkoServiceAccount}/`);
} }
isRequesterThisServiceAccount(serviceName) { isRequesterThisServiceAccount(serviceName) {
return this.canonicalID === return (
`${constants.zenkoServiceAccount}/${serviceName}`; this.canonicalID ===
`${constants.zenkoServiceAccount}/${serviceName}`
);
} }
} }
module.exports = AuthInfo;
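For reference, a sketch of building an `AuthInfo` from a Vault-style object (all field values and the service name below are made up; only the two methods shown in this hunk are used):

```ts
import AuthInfo from './AuthInfo';

const authInfo = new AuthInfo({
    arn: 'arn:aws:iam::123456789012:root',
    canonicalID: 'abcdef0123456789abcdef0123456789',
    shortid: '123456789012',
    email: 'account@example.com',
    accountDisplayName: 'example-account',
});

// A plain account canonical ID does not carry the service-account prefix.
console.log(authInfo.isRequesterAServiceAccount());                  // false
console.log(authInfo.isRequesterThisServiceAccount('some-service')); // false
```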


@ -1,5 +1,5 @@
const errors = require('../errors'); import errors from '../errors';
const AuthInfo = require('./AuthInfo'); import AuthInfo from './AuthInfo';
/** vaultSignatureCb parses message from Vault and instantiates /** vaultSignatureCb parses message from Vault and instantiates
* @param {object} err - error from vault * @param {object} err - error from vault
@ -39,7 +39,10 @@ function vaultSignatureCb(err, authInfo, log, callback, streamingV4Params) {
* authentication backends. * authentication backends.
* @class Vault * @class Vault
*/ */
class Vault { export default class Vault {
client
implName
/** /**
* @constructor * @constructor
* @param {object} client - authentication backend or vault client * @param {object} client - authentication backend or vault client
@ -91,7 +94,7 @@ class Vault {
requestContext: serializedRCsArr, requestContext: serializedRCsArr,
}, },
(err, userInfo) => vaultSignatureCb(err, userInfo, (err, userInfo) => vaultSignatureCb(err, userInfo,
params.log, callback) params.log, callback),
); );
} }
@ -146,7 +149,7 @@ class Vault {
requestContext: serializedRCs, requestContext: serializedRCs,
}, },
(err, userInfo) => vaultSignatureCb(err, userInfo, (err, userInfo) => vaultSignatureCb(err, userInfo,
params.log, callback, streamingV4Params) params.log, callback, streamingV4Params),
); );
} }
@ -232,28 +235,28 @@ class Vault {
*/ */
getAccountIds(canonicalIDs, log, callback) { getAccountIds(canonicalIDs, log, callback) {
log.trace('getting accountIds from Vault based on canonicalIDs', log.trace('getting accountIds from Vault based on canonicalIDs',
{ canonicalIDs }); { canonicalIDs });
this.client.getAccountIds(canonicalIDs, this.client.getAccountIds(canonicalIDs,
{ reqUid: log.getSerializedUids() }, { reqUid: log.getSerializedUids() },
(err, info) => { (err, info) => {
if (err) { if (err) {
log.debug('received error message from vault', log.debug('received error message from vault',
{ errorMessage: err }); { errorMessage: err });
return callback(err); return callback(err);
}
const infoFromVault = info.message.body;
log.trace('info received from vault', { infoFromVault });
const result = {};
/* If the accountId was not found in Vault, do not
send the canonicalID back to the API */
Object.keys(infoFromVault).forEach(key => {
if (infoFromVault[key] !== 'NotFound' &&
infoFromVault[key] !== 'WrongFormat') {
result[key] = infoFromVault[key];
} }
const infoFromVault = info.message.body;
log.trace('info received from vault', { infoFromVault });
const result = {};
/* If the accountId was not found in Vault, do not
send the canonicalID back to the API */
Object.keys(infoFromVault).forEach(key => {
if (infoFromVault[key] !== 'NotFound' &&
infoFromVault[key] !== 'WrongFormat') {
result[key] = infoFromVault[key];
}
});
return callback(null, result);
}); });
return callback(null, result);
});
} }
/** checkPolicies -- call Vault to evaluate policies /** checkPolicies -- call Vault to evaluate policies
@ -312,5 +315,3 @@ class Vault {
}); });
} }
} }
module.exports = Vault;


@ -1,22 +1,20 @@
'use strict'; // eslint-disable-line strict import * as crypto from 'crypto';
import errors from '../errors';
const crypto = require('crypto'); import * as queryString from 'querystring';
const errors = require('../errors'); import AuthInfo from './AuthInfo';
const queryString = require('querystring'); import * as v2 from './v2/authV2';
const AuthInfo = require('./AuthInfo'); import * as v4 from './v4/authV4';
const v2 = require('./v2/authV2'); import * as constants from '../constants';
const v4 = require('./v4/authV4'); import constructStringToSignV2 from './v2/constructStringToSign';
const constants = require('../constants'); import constructStringToSignV4 from './v4/constructStringToSign';
const constructStringToSignV2 = require('./v2/constructStringToSign'); import { convertUTCtoISO8601 } from './v4/timeUtils';
const constructStringToSignV4 = require('./v4/constructStringToSign'); import * as vaultUtilities from './backends/in_memory/vaultUtilities';
const convertUTCtoISO8601 = require('./v4/timeUtils').convertUTCtoISO8601; import * as inMemoryBackend from './backends/in_memory/Backend';
const vaultUtilities = require('./backends/in_memory/vaultUtilities'); import validateAuthConfig from './backends/in_memory/validateAuthConfig';
const inMemoryBackend = require('./backends/in_memory/Backend'); import AuthLoader from './backends/in_memory/AuthLoader';
const validateAuthConfig = require('./backends/in_memory/validateAuthConfig'); import Vault from './Vault';
const AuthLoader = require('./backends/in_memory/AuthLoader'); import baseBackend from './backends/base';
const Vault = require('./Vault'); import chainBackend from './backends/ChainBackend';
const baseBackend = require('./backends/base');
const chainBackend = require('./backends/ChainBackend');
let vault = null; let vault = null;
const auth = {}; const auth = {};
@ -73,8 +71,9 @@ function extractParams(request, log, awsService, data) {
} else if (authHeader.startsWith('AWS4')) { } else if (authHeader.startsWith('AWS4')) {
version = 'v4'; version = 'v4';
} else { } else {
log.trace('invalid authorization security header', log.trace('invalid authorization security header', {
{ header: authHeader }); header: authHeader,
});
return { err: errors.AccessDenied }; return { err: errors.AccessDenied };
} }
} else if (data.Signature) { } else if (data.Signature) {
@ -88,8 +87,10 @@ function extractParams(request, log, awsService, data) {
// Here, either both values are set, or none is set // Here, either both values are set, or none is set
if (version !== null && method !== null) { if (version !== null && method !== null) {
if (!checkFunctions[version] || !checkFunctions[version][method]) { if (!checkFunctions[version] || !checkFunctions[version][method]) {
log.trace('invalid auth version or method', log.trace('invalid auth version or method', {
{ version, authMethod: method }); version,
authMethod: method,
});
return { err: errors.NotImplemented }; return { err: errors.NotImplemented };
} }
log.trace('identified auth method', { version, authMethod: method }); log.trace('identified auth method', { version, authMethod: method });
@ -121,10 +122,11 @@ function doAuth(request, log, cb, awsService, requestContexts) {
return cb(null, res.params); return cb(null, res.params);
} }
if (requestContexts) { if (requestContexts) {
requestContexts.forEach(requestContext => { requestContexts.forEach((requestContext) => {
requestContext.setAuthType(res.params.data.authType); requestContext.setAuthType(res.params.data.authType);
requestContext.setSignatureVersion(res.params requestContext.setSignatureVersion(
.data.signatureVersion); res.params.data.signatureVersion
);
requestContext.setSignatureAge(res.params.data.signatureAge); requestContext.setSignatureAge(res.params.data.signatureAge);
requestContext.setSecurityToken(res.params.data.securityToken); requestContext.setSecurityToken(res.params.data.securityToken);
}); });
@ -136,8 +138,12 @@ function doAuth(request, log, cb, awsService, requestContexts) {
return vault.authenticateV2Request(res.params, requestContexts, cb); return vault.authenticateV2Request(res.params, requestContexts, cb);
} }
if (res.params.version === 4) { if (res.params.version === 4) {
return vault.authenticateV4Request(res.params, requestContexts, cb, return vault.authenticateV4Request(
awsService); res.params,
requestContexts,
cb,
awsService
);
} }
log.error('authentication method not found', { log.error('authentication method not found', {
@ -160,16 +166,22 @@ function doAuth(request, log, cb, awsService, requestContexts) {
* are temporary credentials from STS * are temporary credentials from STS
* @return {undefined} * @return {undefined}
*/ */
function generateV4Headers(request, data, accessKey, secretKeyValue, function generateV4Headers(
awsService, proxyPath, sessionToken) { request,
data,
accessKey,
secretKeyValue,
awsService,
proxyPath,
sessionToken
) {
Object.assign(request, { headers: {} }); Object.assign(request, { headers: {} });
const amzDate = convertUTCtoISO8601(Date.now()); const amzDate = convertUTCtoISO8601(Date.now());
// get date without time // get date without time
const scopeDate = amzDate.slice(0, amzDate.indexOf('T')); const scopeDate = amzDate.slice(0, amzDate.indexOf('T'));
const region = 'us-east-1'; const region = 'us-east-1';
const service = awsService || 'iam'; const service = awsService || 'iam';
const credentialScope = const credentialScope = `${scopeDate}/${region}/${service}/aws4_request`;
`${scopeDate}/${region}/${service}/aws4_request`;
const timestamp = amzDate; const timestamp = amzDate;
const algorithm = 'AWS4-HMAC-SHA256'; const algorithm = 'AWS4-HMAC-SHA256';
@ -179,8 +191,10 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
encodeURIComponent, encodeURIComponent,
}); });
} }
const payloadChecksum = crypto.createHash('sha256') const payloadChecksum = crypto
.update(payload, 'binary').digest('hex'); .createHash('sha256')
.update(payload, 'binary')
.digest('hex');
request.setHeader('host', request._headers.host); request.setHeader('host', request._headers.host);
request.setHeader('x-amz-date', amzDate); request.setHeader('x-amz-date', amzDate);
request.setHeader('x-amz-content-sha256', payloadChecksum); request.setHeader('x-amz-content-sha256', payloadChecksum);
@ -191,47 +205,49 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
Object.assign(request.headers, request._headers); Object.assign(request.headers, request._headers);
const signedHeaders = Object.keys(request._headers) const signedHeaders = Object.keys(request._headers)
.filter(headerName => .filter(
headerName.startsWith('x-amz-') (headerName) =>
|| headerName.startsWith('x-scal-') headerName.startsWith('x-amz-') ||
|| headerName === 'host' headerName.startsWith('x-scal-') ||
).sort().join(';'); headerName === 'host'
const params = { request, signedHeaders, payloadChecksum, )
credentialScope, timestamp, query: data, .sort()
awsService: service, proxyPath }; .join(';');
const params = {
request,
signedHeaders,
payloadChecksum,
credentialScope,
timestamp,
query: data,
awsService: service,
proxyPath,
};
const stringToSign = constructStringToSignV4(params); const stringToSign = constructStringToSignV4(params);
const signingKey = vaultUtilities.calculateSigningKey(secretKeyValue, const signingKey = vaultUtilities.calculateSigningKey(
region, secretKeyValue,
scopeDate, region,
service); scopeDate,
const signature = crypto.createHmac('sha256', signingKey) service
.update(stringToSign, 'binary').digest('hex'); );
const authorizationHeader = `${algorithm} Credential=${accessKey}` + const signature = crypto
.createHmac('sha256', signingKey)
.update(stringToSign, 'binary')
.digest('hex');
const authorizationHeader =
`${algorithm} Credential=${accessKey}` +
`/${credentialScope}, SignedHeaders=${signedHeaders}, ` + `/${credentialScope}, SignedHeaders=${signedHeaders}, ` +
`Signature=${signature}`; `Signature=${signature}`;
request.setHeader('authorization', authorizationHeader); request.setHeader('authorization', authorizationHeader);
Object.assign(request, { headers: {} }); Object.assign(request, { headers: {} });
} }
module.exports = { export const server = { extractParams, doAuth };
setHandler: setAuthHandler, export const client = { generateV4Headers, constructStringToSignV2 };
server: { export const inMemory = {
extractParams, backend: inMemoryBackend,
doAuth, validateAuthConfig,
}, AuthLoader,
client: {
generateV4Headers,
constructStringToSignV2,
},
inMemory: {
backend: inMemoryBackend,
validateAuthConfig,
AuthLoader,
},
backends: {
baseBackend,
chainBackend,
},
AuthInfo,
Vault,
}; };
export const backends = { baseBackend, chainBackend };
export { setAuthHandler as setHandler, AuthInfo, Vault };


@ -1,10 +1,8 @@
'use strict'; // eslint-disable-line strict import assert from 'assert';
import async from 'async';
const assert = require('assert'); import errors from '../../errors';
const async = require('async'); import BaseBackend from './base';
const errors = require('../../errors');
const BaseBackend = require('./base');
/** /**
* Class that provides an authentication backend that will verify signatures * Class that provides an authentication backend that will verify signatures
@ -13,13 +11,15 @@ const BaseBackend = require('./base');
* *
* @class ChainBackend * @class ChainBackend
*/ */
class ChainBackend extends BaseBackend { export default class ChainBackend extends BaseBackend {
_clients: any[];
/** /**
* @constructor * @constructor
* @param {string} service - service id * @param {string} service - service id
* @param {object[]} clients - list of authentication backends or vault clients * @param {object[]} clients - list of authentication backends or vault clients
*/ */
constructor(service, clients) { constructor(service: string, clients: any[]) {
super(service); super(service);
assert(Array.isArray(clients) && clients.length > 0, 'invalid client list'); assert(Array.isArray(clients) && clients.length > 0, 'invalid client list');
@ -34,7 +34,6 @@ class ChainBackend extends BaseBackend {
this._clients = clients; this._clients = clients;
} }
/* /*
* try task against each client for one to be successful * try task against each client for one to be successful
*/ */
@ -62,19 +61,20 @@ class ChainBackend extends BaseBackend {
verifySignatureV4(stringToSign, signatureFromRequest, accessKey, region, scopeDate, options, callback) { verifySignatureV4(stringToSign, signatureFromRequest, accessKey, region, scopeDate, options, callback) {
this._tryEachClient((client, done) => client.verifySignatureV4( this._tryEachClient((client, done) => client.verifySignatureV4(
stringToSign, stringToSign,
signatureFromRequest, signatureFromRequest,
accessKey, accessKey,
region, region,
scopeDate, scopeDate,
options, options,
done done
), callback); ), callback);
} }
static _mergeObjects(objectResponses) { static _mergeObjects(objectResponses) {
return objectResponses.reduce( return objectResponses.reduce(
(retObj, resObj) => Object.assign(retObj, resObj.message.body), (retObj, resObj) => Object.assign(retObj, resObj.message.body),
{}); {}
);
} }
getCanonicalIds(emailAddresses, options, callback) { getCanonicalIds(emailAddresses, options, callback) {
@ -90,7 +90,8 @@ class ChainBackend extends BaseBackend {
body: ChainBackend._mergeObjects(res), body: ChainBackend._mergeObjects(res),
}, },
}); });
}); }
);
} }
getEmailAddresses(canonicalIDs, options, callback) { getEmailAddresses(canonicalIDs, options, callback) {
@ -105,7 +106,8 @@ class ChainBackend extends BaseBackend {
body: ChainBackend._mergeObjects(res), body: ChainBackend._mergeObjects(res),
}, },
}); });
}); }
);
} }
/* /*
@ -128,7 +130,7 @@ class ChainBackend extends BaseBackend {
}); });
}); });
return Object.keys(policyMap).map(key => { return Object.keys(policyMap).map((key) => {
const policyRes = { isAllowed: policyMap[key].isAllowed }; const policyRes = { isAllowed: policyMap[key].isAllowed };
if (policyMap[key].arn !== '') { if (policyMap[key].arn !== '') {
policyRes.arn = policyMap[key].arn; policyRes.arn = policyMap[key].arn;
@ -172,12 +174,12 @@ class ChainBackend extends BaseBackend {
error: !!err ? err : null, error: !!err ? err : null,
status: res, status: res,
}) })
), (err, res) => { ), (err, res) => {
if (err) { if (err) {
return callback(err); return callback(err);
} }
const isError = res.some(results => !!results.error); const isError = res.some((results) => !!results.error);
if (isError) { if (isError) {
return callback(errors.InternalError, res); return callback(errors.InternalError, res);
} }
@ -185,5 +187,3 @@ class ChainBackend extends BaseBackend {
}); });
} }
} }
module.exports = ChainBackend;

View File

@ -1,13 +1,13 @@
'use strict'; // eslint-disable-line strict import errors from '../../errors';
const errors = require('../../errors');
/** /**
* Base backend class * Base backend class
* *
* @class BaseBackend * @class BaseBackend
*/ */
class BaseBackend { export default class BaseBackend {
service
/** /**
* @constructor * @constructor
* @param {string} service - service identifer for construction arn * @param {string} service - service identifer for construction arn
@ -82,5 +82,3 @@ class BaseBackend {
return callback(null, { code: 200, message: 'OK' }); return callback(null, { code: 200, message: 'OK' });
} }
} }
module.exports = BaseBackend;


@ -1,9 +1,9 @@
const fs = require('fs'); import * as fs from 'fs';
const glob = require('simple-glob'); import glob from 'simple-glob';
const joi = require('@hapi/joi'); import joi from '@hapi/joi';
const werelogs = require('werelogs'); import werelogs from 'werelogs';
const ARN = require('../../../models/ARN'); import ARN from '../../../models/ARN';
/** /**
* Load authentication information from files or pre-loaded account * Load authentication information from files or pre-loaded account
@ -11,35 +11,46 @@ const ARN = require('../../../models/ARN');
* *
* @class AuthLoader * @class AuthLoader
*/ */
class AuthLoader { export default class AuthLoader {
_log
_authData
_isValid
_joiKeysValidator
_joiValidator
constructor(logApi) { constructor(logApi) {
this._log = new (logApi || werelogs).Logger('S3'); this._log = new (logApi || werelogs).Logger('S3');
this._authData = { accounts: [] }; this._authData = { accounts: [] };
// null: unknown validity, true/false: valid or invalid // null: unknown validity, true/false: valid or invalid
this._isValid = null; this._isValid = null;
this._joiKeysValidator = joi.array() this._joiKeysValidator = joi
.array()
.items({ .items({
access: joi.string().required(), access: joi.string().required(),
secret: joi.string().required(), secret: joi.string().required(),
}) })
.required(); .required();
const accountsJoi = joi.array() const accountsJoi = joi
.items({ .array()
name: joi.string().required(), .items({
email: joi.string().email().required(), name: joi.string().required(),
arn: joi.string().required(), email: joi.string().email().required(),
canonicalID: joi.string().required(), arn: joi.string().required(),
shortid: joi.string().regex(/^[0-9]{12}$/).required(), canonicalID: joi.string().required(),
keys: this._joiKeysValidator, shortid: joi
// backward-compat .string()
users: joi.array(), .regex(/^[0-9]{12}$/)
}) .required(),
.required() keys: this._joiKeysValidator,
.unique('arn') // backward-compat
.unique('email') users: joi.array(),
.unique('canonicalID'); })
.required()
.unique('arn')
.unique('email')
.unique('canonicalID');
this._joiValidator = joi.object({ accounts: accountsJoi }); this._joiValidator = joi.object({ accounts: accountsJoi });
} }
@ -64,11 +75,12 @@ class AuthLoader {
* logging purpose * logging purpose
* @return {undefined} * @return {undefined}
*/ */
addAccounts(authData, filePath) { addAccounts(authData, filePath: string) {
const isValid = this._validateData(authData, filePath); const isValid = this._validateData(authData, filePath);
if (isValid) { if (isValid) {
this._authData.accounts = this._authData.accounts = this._authData.accounts.concat(
this._authData.accounts.concat(authData.accounts); authData.accounts
);
// defer validity checking when getting data to avoid // defer validity checking when getting data to avoid
// logging multiple times the errors (we need to validate // logging multiple times the errors (we need to validate
// all accounts at once to detect duplicate values) // all accounts at once to detect duplicate values)
@ -87,7 +99,7 @@ class AuthLoader {
* authentication info (see {@link addAccounts()} for format) * authentication info (see {@link addAccounts()} for format)
* @return {undefined} * @return {undefined}
*/ */
addFile(filePath) { addFile(filePath: string) {
const authData = JSON.parse(fs.readFileSync(filePath)); const authData = JSON.parse(fs.readFileSync(filePath));
this.addAccounts(authData, filePath); this.addAccounts(authData, filePath);
} }
@ -103,9 +115,9 @@ class AuthLoader {
* {@link addAccounts()} for JSON format. * {@link addAccounts()} for JSON format.
* @return {undefined} * @return {undefined}
*/ */
addFilesByGlob(globPattern) { addFilesByGlob(globPattern: string | string[]) {
const files = glob(globPattern); const files = glob(globPattern);
files.forEach(filePath => this.addFile(filePath)); files.forEach((filePath) => this.addFile(filePath));
} }
/** /**
@ -134,9 +146,10 @@ class AuthLoader {
return this.validate() ? this._authData : null; return this.validate() ? this._authData : null;
} }
_validateData(authData, filePath) { _validateData(authData, filePath?: string) {
const res = joi.validate(authData, this._joiValidator, const res = joi.validate(authData, this._joiValidator, {
{ abortEarly: false }); abortEarly: false,
});
if (res.error) { if (res.error) {
this._dumpJoiErrors(res.error.details, filePath); this._dumpJoiErrors(res.error.details, filePath);
return false; return false;
@ -144,19 +157,23 @@ class AuthLoader {
let allKeys = []; let allKeys = [];
let arnError = false; let arnError = false;
const validatedAuth = res.value; const validatedAuth = res.value;
validatedAuth.accounts.forEach(account => { validatedAuth.accounts.forEach((account) => {
// backward-compat: ignore arn if starts with 'aws:' and log a // backward-compat: ignore arn if starts with 'aws:' and log a
// warning // warning
if (account.arn.startsWith('aws:')) { if (account.arn.startsWith('aws:')) {
this._log.error( this._log.error(
'account must have a valid AWS ARN, legacy examples ' + 'account must have a valid AWS ARN, legacy examples ' +
'starting with \'aws:\' are not supported anymore. ' + "starting with 'aws:' are not supported anymore. " +
'Please convert to a proper account entry (see ' + 'Please convert to a proper account entry (see ' +
'examples at https://github.com/scality/S3/blob/' + 'examples at https://github.com/scality/S3/blob/' +
'master/conf/authdata.json). Also note that support ' + 'master/conf/authdata.json). Also note that support ' +
'for account users has been dropped.', 'for account users has been dropped.',
{ accountName: account.name, accountArn: account.arn, {
filePath }); accountName: account.name,
accountArn: account.arn,
filePath,
}
);
arnError = true; arnError = true;
return; return;
} }
@ -166,27 +183,33 @@ class AuthLoader {
'turning users into account entries (see examples at ' + 'turning users into account entries (see examples at ' +
'https://github.com/scality/S3/blob/master/conf/' + 'https://github.com/scality/S3/blob/master/conf/' +
'authdata.json)', 'authdata.json)',
{ accountName: account.name, accountArn: account.arn, {
filePath }); accountName: account.name,
accountArn: account.arn,
filePath,
}
);
arnError = true; arnError = true;
return; return;
} }
const arnObj = ARN.createFromString(account.arn); const arnObj = ARN.createFromString(account.arn);
if (arnObj.error) { if (arnObj.error) {
this._log.error( this._log.error('authentication config validation error', {
'authentication config validation error', reason: arnObj.error.description,
{ reason: arnObj.error.description, accountName: account.name,
accountName: account.name, accountArn: account.arn, accountArn: account.arn,
filePath }); filePath,
});
arnError = true; arnError = true;
return; return;
} }
if (!arnObj.isIAMAccount()) { if (!arnObj.isIAMAccount()) {
this._log.error( this._log.error('authentication config validation error', {
'authentication config validation error', reason: 'not an IAM account ARN',
{ reason: 'not an IAM account ARN', accountName: account.name,
accountName: account.name, accountArn: account.arn, accountArn: account.arn,
filePath }); filePath,
});
arnError = true; arnError = true;
return; return;
} }
@ -196,7 +219,9 @@ class AuthLoader {
return false; return false;
} }
const uniqueKeysRes = joi.validate( const uniqueKeysRes = joi.validate(
allKeys, this._joiKeysValidator.unique('access')); allKeys,
this._joiKeysValidator.unique('access')
);
if (uniqueKeysRes.error) { if (uniqueKeysRes.error) {
this._dumpJoiErrors(uniqueKeysRes.error.details, filePath); this._dumpJoiErrors(uniqueKeysRes.error.details, filePath);
return false; return false;
@ -205,7 +230,7 @@ class AuthLoader {
} }
_dumpJoiErrors(errors, filePath) { _dumpJoiErrors(errors, filePath) {
errors.forEach(err => { errors.forEach((err) => {
const logInfo = { item: err.path, filePath }; const logInfo = { item: err.path, filePath };
if (err.type === 'array.unique') { if (err.type === 'array.unique') {
logInfo.reason = `duplicate value '${err.context.path}'`; logInfo.reason = `duplicate value '${err.context.path}'`;
@ -214,10 +239,7 @@ class AuthLoader {
logInfo.reason = err.message; logInfo.reason = err.message;
logInfo.context = err.context; logInfo.context = err.context;
} }
this._log.error('authentication config validation error', this._log.error('authentication config validation error', logInfo);
logInfo);
}); });
} }
} }
module.exports = AuthLoader;
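A minimal usage sketch (the account entry below is made up but follows the authdata.json shape referenced in the error messages above; the `'inline-example'` label stands in for a file path):

```ts
import werelogs from 'werelogs';
import AuthLoader from './AuthLoader';

const loader = new AuthLoader(werelogs);
loader.addAccounts({
    accounts: [{
        name: 'example-account',
        email: 'account@example.com',
        arn: 'arn:aws:iam::123456789012:root',
        canonicalID: 'abcdef0123456789abcdef0123456789',
        shortid: '123456789012',
        keys: [{ access: 'EXAMPLEACCESSKEY', secret: 'EXAMPLESECRETKEY' }],
    }],
}, 'inline-example');

// true when the joi schema and the IAM account ARN checks both pass.
console.log(loader.validate());
```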

View File

@ -1,12 +1,8 @@
'use strict'; // eslint-disable-line strict import * as crypto from 'crypto';
import errors from '../../../errors';
const crypto = require('crypto'); import { calculateSigningKey, hashSignature } from './vaultUtilities';
import Indexer from './Indexer';
const errors = require('../../../errors'); import BaseBackend from '../base';
const calculateSigningKey = require('./vaultUtilities').calculateSigningKey;
const hashSignature = require('./vaultUtilities').hashSignature;
const Indexer = require('./Indexer');
const BaseBackend = require('../base');
function _formatResponse(userInfoToSend) { function _formatResponse(userInfoToSend) {
return { return {
@ -23,6 +19,9 @@ function _formatResponse(userInfoToSend) {
* @class InMemoryBackend * @class InMemoryBackend
*/ */
class InMemoryBackend extends BaseBackend { class InMemoryBackend extends BaseBackend {
indexer
formatResponse
/** /**
* @constructor * @constructor
* @param {string} service - service identifer for construction arn * @param {string} service - service identifer for construction arn
@ -36,15 +35,23 @@ class InMemoryBackend extends BaseBackend {
this.formatResponse = formatter; this.formatResponse = formatter;
} }
verifySignatureV2(stringToSign, signatureFromRequest, verifySignatureV2(
accessKey, options, callback) { stringToSign,
signatureFromRequest,
accessKey,
options,
callback
) {
const entity = this.indexer.getEntityByKey(accessKey); const entity = this.indexer.getEntityByKey(accessKey);
if (!entity) { if (!entity) {
return callback(errors.InvalidAccessKeyId); return callback(errors.InvalidAccessKeyId);
} }
const secretKey = this.indexer.getSecretKey(entity, accessKey); const secretKey = this.indexer.getSecretKey(entity, accessKey);
const reconstructedSig = const reconstructedSig = hashSignature(
hashSignature(stringToSign, secretKey, options.algo); stringToSign,
secretKey,
options.algo
);
if (signatureFromRequest !== reconstructedSig) { if (signatureFromRequest !== reconstructedSig) {
return callback(errors.SignatureDoesNotMatch); return callback(errors.SignatureDoesNotMatch);
} }
@ -58,16 +65,25 @@ class InMemoryBackend extends BaseBackend {
return callback(null, vaultReturnObject); return callback(null, vaultReturnObject);
} }
verifySignatureV4(stringToSign, signatureFromRequest, accessKey, verifySignatureV4(
region, scopeDate, options, callback) { stringToSign,
signatureFromRequest,
accessKey,
region,
scopeDate,
options,
callback
) {
const entity = this.indexer.getEntityByKey(accessKey); const entity = this.indexer.getEntityByKey(accessKey);
if (!entity) { if (!entity) {
return callback(errors.InvalidAccessKeyId); return callback(errors.InvalidAccessKeyId);
} }
const secretKey = this.indexer.getSecretKey(entity, accessKey); const secretKey = this.indexer.getSecretKey(entity, accessKey);
const signingKey = calculateSigningKey(secretKey, region, scopeDate); const signingKey = calculateSigningKey(secretKey, region, scopeDate);
const reconstructedSig = crypto.createHmac('sha256', signingKey) const reconstructedSig = crypto
.update(stringToSign, 'binary').digest('hex'); .createHmac('sha256', signingKey)
.update(stringToSign, 'binary')
.digest('hex');
if (signatureFromRequest !== reconstructedSig) { if (signatureFromRequest !== reconstructedSig) {
return callback(errors.SignatureDoesNotMatch); return callback(errors.SignatureDoesNotMatch);
} }
@ -83,14 +99,13 @@ class InMemoryBackend extends BaseBackend {
getCanonicalIds(emails, log, cb) { getCanonicalIds(emails, log, cb) {
const results = {}; const results = {};
emails.forEach(email => { emails.forEach((email) => {
const lowercasedEmail = email.toLowerCase(); const lowercasedEmail = email.toLowerCase();
const entity = this.indexer.getEntityByEmail(lowercasedEmail); const entity = this.indexer.getEntityByEmail(lowercasedEmail);
if (!entity) { if (!entity) {
results[email] = 'NotFound'; results[email] = 'NotFound';
} else { } else {
results[email] = results[email] = entity.canonicalID;
entity.canonicalID;
} }
}); });
const vaultReturnObject = { const vaultReturnObject = {
@ -103,7 +118,7 @@ class InMemoryBackend extends BaseBackend {
getEmailAddresses(canonicalIDs, options, cb) { getEmailAddresses(canonicalIDs, options, cb) {
const results = {}; const results = {};
canonicalIDs.forEach(canonicalId => { canonicalIDs.forEach((canonicalId) => {
const foundEntity = this.indexer.getEntityByCanId(canonicalId); const foundEntity = this.indexer.getEntityByCanId(canonicalId);
if (!foundEntity || !foundEntity.email) { if (!foundEntity || !foundEntity.email) {
results[canonicalId] = 'NotFound'; results[canonicalId] = 'NotFound';
@ -131,7 +146,7 @@ class InMemoryBackend extends BaseBackend {
*/ */
getAccountIds(canonicalIDs, options, cb) { getAccountIds(canonicalIDs, options, cb) {
const results = {}; const results = {};
canonicalIDs.forEach(canonicalID => { canonicalIDs.forEach((canonicalID) => {
const foundEntity = this.indexer.getEntityByCanId(canonicalID); const foundEntity = this.indexer.getEntityByCanId(canonicalID);
if (!foundEntity || !foundEntity.shortid) { if (!foundEntity || !foundEntity.shortid) {
results[canonicalID] = 'Not Found'; results[canonicalID] = 'Not Found';
@ -148,7 +163,6 @@ class InMemoryBackend extends BaseBackend {
} }
} }
class S3AuthBackend extends InMemoryBackend { class S3AuthBackend extends InMemoryBackend {
/** /**
* @constructor * @constructor


@ -6,7 +6,7 @@
* *
* @class Indexer * @class Indexer
*/ */
class Indexer { export default class Indexer {
/** /**
* @constructor * @constructor
* @param {object} authdata - the authentication config file's data * @param {object} authdata - the authentication config file's data
@ -141,5 +141,3 @@ class Indexer {
return entity.accountDisplayName; return entity.accountDisplayName;
} }
} }
module.exports = Indexer;


@ -1,4 +1,4 @@
const AuthLoader = require('./AuthLoader'); import AuthLoader from './AuthLoader';
/** /**
* @deprecated please use {@link AuthLoader} class instead * @deprecated please use {@link AuthLoader} class instead
@ -9,10 +9,8 @@ const AuthLoader = require('./AuthLoader');
* @return {boolean} true on erroneous data * @return {boolean} true on erroneous data
* false on success * false on success
*/ */
function validateAuthConfig(authdata, logApi) { export default function validateAuthConfig(authdata, logApi) {
const authLoader = new AuthLoader(logApi); const authLoader = new AuthLoader(logApi);
authLoader.addAccounts(authdata); authLoader.addAccounts(authdata);
return !authLoader.validate(); return !authLoader.validate();
} }
module.exports = validateAuthConfig;


@ -1,6 +1,4 @@
'use strict'; // eslint-disable-line strict import * as crypto from 'crypto';
const crypto = require('crypto');
/** hashSignature for v2 Auth /** hashSignature for v2 Auth
* @param {string} stringToSign - built string to sign per AWS rules * @param {string} stringToSign - built string to sign per AWS rules
@ -8,7 +6,11 @@ const crypto = require('crypto');
* @param {string} algorithm - either SHA256 or SHA1 * @param {string} algorithm - either SHA256 or SHA1
* @return {string} reconstructed signature * @return {string} reconstructed signature
*/ */
function hashSignature(stringToSign, secretKey, algorithm) { export function hashSignature(
stringToSign: string,
secretKey: string,
algorithm: 'SHA256' | 'SHA1'
): string {
const hmacObject = crypto.createHmac(algorithm, secretKey); const hmacObject = crypto.createHmac(algorithm, secretKey);
return hmacObject.update(stringToSign, 'binary').digest('base64'); return hmacObject.update(stringToSign, 'binary').digest('base64');
} }
@ -20,7 +22,12 @@ function hashSignature(stringToSign, secretKey, algorithm) {
* @param {string} [service] - To specify another service than s3 * @param {string} [service] - To specify another service than s3
* @return {string} signingKey - signingKey to calculate signature * @return {string} signingKey - signingKey to calculate signature
*/ */
function calculateSigningKey(secretKey, region, scopeDate, service) { export function calculateSigningKey(
secretKey: string,
region: string,
scopeDate: string,
service: string
): string {
const dateKey = crypto.createHmac('sha256', `AWS4${secretKey}`) const dateKey = crypto.createHmac('sha256', `AWS4${secretKey}`)
.update(scopeDate, 'binary').digest(); .update(scopeDate, 'binary').digest();
const dateRegionKey = crypto.createHmac('sha256', dateKey) const dateRegionKey = crypto.createHmac('sha256', dateKey)
@ -31,5 +38,3 @@ function calculateSigningKey(secretKey, region, scopeDate, service) {
.update('aws4_request', 'binary').digest(); .update('aws4_request', 'binary').digest();
return signingKey; return signingKey;
} }
module.exports = { hashSignature, calculateSigningKey };
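A sketch of how the two helpers fit together (keys, scope date, and strings to sign are placeholders, not real credentials or real canonical requests):

```ts
import * as crypto from 'crypto';
import { hashSignature, calculateSigningKey } from './vaultUtilities';

// v2: a single HMAC of the string to sign, base64-encoded.
const v2Signature = hashSignature('GET\n\n\n\n/example-bucket', 'exampleSecretKey', 'SHA256');
console.log(v2Signature);

// v4: derive the scoped key (date -> region -> service -> aws4_request),
// then sign the string to sign with it, hex-encoded.
const signingKey = calculateSigningKey('exampleSecretKey', 'us-east-1', '20220218', 's3');
const v4Signature = crypto
    .createHmac('sha256', signingKey)
    .update('<string to sign>', 'binary')
    .digest('hex');
console.log(v4Signature);
```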


@ -1,6 +1,4 @@
'use strict'; // eslint-disable-line strict export default function algoCheck(signatureLength) {
function algoCheck(signatureLength) {
let algo; let algo;
// If the signature sent is 44 characters, // If the signature sent is 44 characters,
// this means that sha256 was used: // this means that sha256 was used:
@ -15,5 +13,3 @@ function algoCheck(signatureLength) {
} }
return algo; return algo;
} }
module.exports = algoCheck;
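In practice the caller passes the length of the base64 signature it received; a hedged sketch, assuming the 28-character branch mirrors the 44-character one shown above:

```ts
import algoCheck from './algoCheck';

// base64(HMAC-SHA256) is 44 characters long, base64(HMAC-SHA1) is 28.
console.log(algoCheck(44)); // the SHA-256 algorithm identifier
console.log(algoCheck(28)); // the SHA-1 algorithm identifier
console.log(algoCheck(30)); // undefined: unknown signature length
```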


@ -1,5 +1,3 @@
'use strict'; // eslint-disable-line strict
const headerAuthCheck = require('./headerAuthCheck'); const headerAuthCheck = require('./headerAuthCheck');
const queryAuthCheck = require('./queryAuthCheck'); const queryAuthCheck = require('./queryAuthCheck');


@ -1,9 +1,9 @@
'use strict'; // eslint-disable-line strict 'use strict'; // eslint-disable-line strict
const errors = require('../../errors'); import errors from '../../errors';
const epochTime = new Date('1970-01-01').getTime(); const epochTime = new Date('1970-01-01').getTime();
function checkRequestExpiry(timestamp, log) { export default function checkRequestExpiry(timestamp, log) {
// If timestamp is before epochTime, the request is invalid and return // If timestamp is before epochTime, the request is invalid and return
// errors.AccessDenied // errors.AccessDenied
if (timestamp < epochTime) { if (timestamp < epochTime) {
@ -17,7 +17,7 @@ function checkRequestExpiry(timestamp, log) {
log.trace('request timestamp', { requestTimestamp: timestamp }); log.trace('request timestamp', { requestTimestamp: timestamp });
log.trace('current timestamp', { currentTimestamp: currentTime }); log.trace('current timestamp', { currentTimestamp: currentTime });
const fifteenMinutes = (15 * 60 * 1000); const fifteenMinutes = 15 * 60 * 1000;
if (currentTime - timestamp > fifteenMinutes) { if (currentTime - timestamp > fifteenMinutes) {
log.trace('request timestamp is not within 15 minutes of current time'); log.trace('request timestamp is not within 15 minutes of current time');
log.debug('request time too skewed', { timestamp }); log.debug('request time too skewed', { timestamp });
@ -32,5 +32,3 @@ function checkRequestExpiry(timestamp, log) {
return undefined; return undefined;
} }
module.exports = checkRequestExpiry;
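A quick sketch with a stub logger (the code shown here only calls `trace` and `debug`; the timestamps are illustrative):

```ts
import checkRequestExpiry from './checkRequestExpiry';

// Minimal stand-in for a werelogs request logger.
const log = {
    trace: (msg: string, ctx?: object) => console.log(msg, ctx),
    debug: (msg: string, ctx?: object) => console.log(msg, ctx),
};

console.log(checkRequestExpiry(Date.now(), log));                  // undefined: within the window
console.log(checkRequestExpiry(Date.now() - 16 * 60 * 1000, log)); // errors.RequestTimeTooSkewed
```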


@ -1,11 +1,8 @@
'use strict'; // eslint-disable-line strict import utf8 from 'utf8';
import getCanonicalizedAmzHeaders from './getCanonicalizedAmzHeaders';
import getCanonicalizedResource from './getCanonicalizedResource';
const utf8 = require('utf8'); export default function constructStringToSign(request, data, log, clientType?: any) {
const getCanonicalizedAmzHeaders = require('./getCanonicalizedAmzHeaders');
const getCanonicalizedResource = require('./getCanonicalizedResource');
function constructStringToSign(request, data, log, clientType) {
/* /*
Build signature per AWS requirements: Build signature per AWS requirements:
StringToSign = HTTP-Verb + '\n' + StringToSign = HTTP-Verb + '\n' +
@ -23,11 +20,11 @@ function constructStringToSign(request, data, log, clientType) {
const contentMD5 = headers['content-md5'] ? const contentMD5 = headers['content-md5'] ?
headers['content-md5'] : query['Content-MD5']; headers['content-md5'] : query['Content-MD5'];
stringToSign += (contentMD5 ? `${contentMD5}\n` : '\n'); stringToSign += contentMD5 ? `${contentMD5}\n` : '\n';
const contentType = headers['content-type'] ? const contentType = headers['content-type'] ?
headers['content-type'] : query['Content-Type']; headers['content-type'] : query['Content-Type'];
stringToSign += (contentType ? `${contentType}\n` : '\n'); stringToSign += contentType ? `${contentType}\n` : '\n';
/* /*
AWS docs are conflicting on whether to include x-amz-date header here AWS docs are conflicting on whether to include x-amz-date header here
@ -42,5 +39,3 @@ function constructStringToSign(request, data, log, clientType) {
+ getCanonicalizedResource(request, clientType); + getCanonicalizedResource(request, clientType);
return utf8.encode(stringToSign); return utf8.encode(stringToSign);
} }
module.exports = constructStringToSign;


@ -1,6 +1,4 @@
'use strict'; // eslint-disable-line strict export default function getCanonicalizedAmzHeaders(headers, clientType) {
function getCanonicalizedAmzHeaders(headers, clientType) {
/* /*
Iterate through headers and pull any headers that are x-amz headers. Iterate through headers and pull any headers that are x-amz headers.
Need to include 'x-amz-date' here even though AWS docs Need to include 'x-amz-date' here even though AWS docs
@ -41,7 +39,5 @@ function getCanonicalizedAmzHeaders(headers, clientType) {
// Build headerString // Build headerString
return amzHeaders.reduce((headerStr, current) => return amzHeaders.reduce((headerStr, current) =>
`${headerStr}${current[0]}:${current[1]}\n`, `${headerStr}${current[0]}:${current[1]}\n`,
''); '');
} }
module.exports = getCanonicalizedAmzHeaders;


@ -1,6 +1,4 @@
'use strict'; // eslint-disable-line strict import * as url from 'url';
const url = require('url');
const gcpSubresources = [ const gcpSubresources = [
'acl', 'acl',
@ -41,7 +39,7 @@ const awsSubresources = [
'website', 'website',
]; ];
function getCanonicalizedResource(request, clientType) { export default function getCanonicalizedResource(request, clientType) {
/* /*
This variable is used to determine whether to insert This variable is used to determine whether to insert
a '?' or '&'. Once a query parameter is added to the resourceString, a '?' or '&'. Once a query parameter is added to the resourceString,
@ -117,5 +115,3 @@ function getCanonicalizedResource(request, clientType) {
} }
return resourceString; return resourceString;
} }
module.exports = getCanonicalizedResource;

View File

@ -1,12 +1,10 @@
'use strict'; // eslint-disable-line strict import errors from '../../errors';
import * as constants from '../../constants';
import constructStringToSign from './constructStringToSign';
import checkRequestExpiry from './checkRequestExpiry';
import algoCheck from './algoCheck';
const errors = require('../../errors'); export function check(request, log, data) {
const constants = require('../../constants');
const constructStringToSign = require('./constructStringToSign');
const checkRequestExpiry = require('./checkRequestExpiry');
const algoCheck = require('./algoCheck');
function check(request, log, data) {
log.trace('running header auth check'); log.trace('running header auth check');
const headers = request.headers; const headers = request.headers;
@ -17,15 +15,19 @@ function check(request, log, data) {
} }
// Check to make sure timestamp is within 15 minutes of current time // Check to make sure timestamp is within 15 minutes of current time
let timestamp = headers['x-amz-date'] ? let timestamp = headers['x-amz-date']
headers['x-amz-date'] : headers.date; ? headers['x-amz-date']
: headers.date;
timestamp = Date.parse(timestamp); timestamp = Date.parse(timestamp);
if (!timestamp) { if (!timestamp) {
log.debug('missing or invalid date header', log.debug('missing or invalid date header', {
{ method: 'auth/v2/headerAuthCheck.check' }); method: 'auth/v2/headerAuthCheck.check',
return { err: errors.AccessDenied. });
customizeDescription('Authentication requires a valid Date or ' + return {
'x-amz-date header') }; err: errors.AccessDenied.customizeDescription(
'Authentication requires a valid Date or ' + 'x-amz-date header'
),
};
} }
const err = checkRequestExpiry(timestamp, log); const err = checkRequestExpiry(timestamp, log);
@ -46,8 +48,10 @@ function check(request, log, data) {
log.debug('invalid authorization header', { authInfo }); log.debug('invalid authorization header', { authInfo });
return { err: errors.InvalidArgument }; return { err: errors.InvalidArgument };
} }
const accessKey = semicolonIndex > 4 ? const accessKey =
authInfo.substring(4, semicolonIndex).trim() : undefined; semicolonIndex > 4
? authInfo.substring(4, semicolonIndex).trim()
: undefined;
if (typeof accessKey !== 'string' || accessKey.length === 0) { if (typeof accessKey !== 'string' || accessKey.length === 0) {
log.trace('invalid authorization header', { authInfo }); log.trace('invalid authorization header', { authInfo });
return { err: errors.MissingSecurityHeader }; return { err: errors.MissingSecurityHeader };
@ -80,5 +84,3 @@ function check(request, log, data) {
}, },
}; };
} }
module.exports = { check };

View File

@ -1,11 +1,9 @@
'use strict'; // eslint-disable-line strict import errors from '../../errors';
import * as constants from '../../constants';
import algoCheck from './algoCheck';
import constructStringToSign from './constructStringToSign';
const errors = require('../../errors'); export function check(request, log, data) {
const constants = require('../../constants');
const algoCheck = require('./algoCheck');
const constructStringToSign = require('./constructStringToSign');
function check(request, log, data) {
log.trace('running query auth check'); log.trace('running query auth check');
if (request.method === 'POST') { if (request.method === 'POST') {
log.debug('query string auth not supported for post requests'); log.debug('query string auth not supported for post requests');
@ -28,26 +26,28 @@ function check(request, log, data) {
*/ */
const expirationTime = parseInt(data.Expires, 10) * 1000; const expirationTime = parseInt(data.Expires, 10) * 1000;
if (Number.isNaN(expirationTime)) { if (Number.isNaN(expirationTime)) {
log.debug('invalid expires parameter', log.debug('invalid expires parameter', { expires: data.Expires });
{ expires: data.Expires });
return { err: errors.MissingSecurityHeader }; return { err: errors.MissingSecurityHeader };
} }
const currentTime = Date.now(); const currentTime = Date.now();
const preSignedURLExpiry = process.env.PRE_SIGN_URL_EXPIRY const preSignedURLExpiry =
&& !Number.isNaN(process.env.PRE_SIGN_URL_EXPIRY) process.env.PRE_SIGN_URL_EXPIRY &&
? Number.parseInt(process.env.PRE_SIGN_URL_EXPIRY, 10) !Number.isNaN(process.env.PRE_SIGN_URL_EXPIRY)
: constants.defaultPreSignedURLExpiry * 1000; ? Number.parseInt(process.env.PRE_SIGN_URL_EXPIRY, 10)
: constants.defaultPreSignedURLExpiry * 1000;
if (expirationTime > currentTime + preSignedURLExpiry) { if (expirationTime > currentTime + preSignedURLExpiry) {
log.debug('expires parameter too far in future', log.debug('expires parameter too far in future', {
{ expires: request.query.Expires }); expires: request.query.Expires,
});
return { err: errors.AccessDenied }; return { err: errors.AccessDenied };
} }
if (currentTime > expirationTime) { if (currentTime > expirationTime) {
log.debug('current time exceeds expires time', log.debug('current time exceeds expires time', {
{ expires: request.query.Expires }); expires: request.query.Expires,
});
return { err: errors.RequestTimeTooSkewed }; return { err: errors.RequestTimeTooSkewed };
} }
const accessKey = data.AWSAccessKeyId; const accessKey = data.AWSAccessKeyId;
@ -82,5 +82,3 @@ function check(request, log, data) {
}, },
}; };
} }
module.exports = { check };
@ -1,5 +1,3 @@
'use strict'; // eslint-disable-line strict
/* /*
AWS's URI encoding rules: AWS's URI encoding rules:
URI encode every byte. Uri-Encode() must enforce the following rules: URI encode every byte. Uri-Encode() must enforce the following rules:
@ -32,23 +30,27 @@ function _toHexUTF8(char) {
return res; return res;
} }
-function awsURIencode(input, encodeSlash, noEncodeStar) {
+export default function awsURIencode(input, encodeSlash?: any, noEncodeStar?: any) {
const encSlash = encodeSlash === undefined ? true : encodeSlash; const encSlash = encodeSlash === undefined ? true : encodeSlash;
let encoded = ''; let encoded = '';
/** /**
* Duplicate query params are not suppported by AWS S3 APIs. These params * Duplicate query params are not suppported by AWS S3 APIs. These params
* are parsed as Arrays by Node.js HTTP parser which breaks this method * are parsed as Arrays by Node.js HTTP parser which breaks this method
*/ */
if (typeof input !== 'string') { if (typeof input !== 'string') {
return encoded; return encoded;
} }
for (let i = 0; i < input.length; i++) { for (let i = 0; i < input.length; i++) {
let ch = input.charAt(i); let ch = input.charAt(i);
if ((ch >= 'A' && ch <= 'Z') || if (
(ch >= 'A' && ch <= 'Z') ||
(ch >= 'a' && ch <= 'z') || (ch >= 'a' && ch <= 'z') ||
(ch >= '0' && ch <= '9') || (ch >= '0' && ch <= '9') ||
ch === '_' || ch === '-' || ch === '_' ||
ch === '~' || ch === '.') { ch === '-' ||
ch === '~' ||
ch === '.'
) {
encoded = encoded.concat(ch); encoded = encoded.concat(ch);
} else if (ch === ' ') { } else if (ch === ' ') {
encoded = encoded.concat('%20'); encoded = encoded.concat('%20');
@ -76,5 +78,3 @@ function awsURIencode(input, encodeSlash, noEncodeStar) {
} }
return encoded; return encoded;
} }
module.exports = awsURIencode;
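The rules quoted in the header comment are easier to read with concrete inputs. A small sketch, assuming the module path; the slash case follows the AWS rule cited above (that branch falls outside this hunk):

```
// Illustrative only; the import path is an assumption.
import awsURIencode from './lib/auth/v4/awsURIencode';

// Unreserved characters pass through untouched.
console.log(awsURIencode('photo-2022_v1.~bak')); // photo-2022_v1.~bak

// Spaces become %20 (never '+').
console.log(awsURIencode('my file'));            // my%20file

// Slashes are left alone when the caller opts out of encoding them,
// e.g. for an object key inside a path.
console.log(awsURIencode('a/b', false));         // a/b

// Non-string input (duplicated query params parsed as an array by Node)
// yields an empty string rather than throwing.
console.log(awsURIencode(['x', 'y'] as any));    // ''
```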
@ -1,17 +1,22 @@
-'use strict'; // eslint-disable-line strict
-const crypto = require('crypto');
-const createCanonicalRequest = require('./createCanonicalRequest');
+import * as crypto from 'crypto';
+import createCanonicalRequest from './createCanonicalRequest';
/** /**
* constructStringToSign - creates V4 stringToSign * constructStringToSign - creates V4 stringToSign
* @param {object} params - params object * @param {object} params - params object
* @returns {string} - stringToSign * @returns {string} - stringToSign
*/ */
-function constructStringToSign(params) {
-    const { request, signedHeaders, payloadChecksum, credentialScope, timestamp,
-        query, log, proxyPath } = params;
+export default function constructStringToSign(params): string {
+    const {
+        request,
+        signedHeaders,
+        payloadChecksum,
+        credentialScope,
+        timestamp,
+        query,
+        log,
+        proxyPath,
+    } = params;
const path = proxyPath || request.path; const path = proxyPath || request.path;
const canonicalReqResult = createCanonicalRequest({ const canonicalReqResult = createCanonicalRequest({
@ -34,11 +39,11 @@ function constructStringToSign(params) {
log.debug('constructed canonicalRequest', { canonicalReqResult }); log.debug('constructed canonicalRequest', { canonicalReqResult });
} }
const sha256 = crypto.createHash('sha256'); const sha256 = crypto.createHash('sha256');
const canonicalHex = sha256.update(canonicalReqResult, 'binary') const canonicalHex = sha256
.update(canonicalReqResult, 'binary')
.digest('hex'); .digest('hex');
const stringToSign = `AWS4-HMAC-SHA256\n${timestamp}\n` + const stringToSign =
`${credentialScope}\n${canonicalHex}`; `AWS4-HMAC-SHA256\n${timestamp}\n` +
`${credentialScope}\n${canonicalHex}`;
return stringToSign; return stringToSign;
} }
module.exports = constructStringToSign;
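The reformatting does not change the signed payload: the V4 stringToSign is still the algorithm tag, the timestamp, the credential scope and the hex SHA-256 of the canonical request, joined by newlines. A self-contained sketch of that layout (the input values are made up; only the format is taken from the code above):

```
import * as crypto from 'crypto';

// Hypothetical inputs, for illustration only.
const timestamp = '20220218T170400Z';
const credentialScope = '20220218/us-east-1/s3/aws4_request';
const canonicalRequest = 'GET\n/\n\nhost:example.com\n\nhost\n' +
    crypto.createHash('sha256').update('').digest('hex');

const canonicalHex = crypto
    .createHash('sha256')
    .update(canonicalRequest, 'binary')
    .digest('hex');

const stringToSign =
    `AWS4-HMAC-SHA256\n${timestamp}\n` +
    `${credentialScope}\n${canonicalHex}`;

console.log(stringToSign);
// AWS4-HMAC-SHA256
// 20220218T170400Z
// 20220218/us-east-1/s3/aws4_request
// <64 hex characters>
```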
@ -1,8 +1,6 @@
-'use strict'; // eslint-disable-line strict
-const awsURIencode = require('./awsURIencode');
-const crypto = require('crypto');
-const queryString = require('querystring');
+import awsURIencode from './awsURIencode';
+import * as crypto from 'crypto';
+import * as queryString from 'querystring';
/** /**
* createCanonicalRequest - creates V4 canonical request * createCanonicalRequest - creates V4 canonical request
@ -12,7 +10,7 @@ const queryString = require('querystring');
* payloadChecksum (from request) * payloadChecksum (from request)
* @returns {string} - canonicalRequest * @returns {string} - canonicalRequest
*/ */
-function createCanonicalRequest(params) {
+export default function createCanonicalRequest(params) {
const pHttpVerb = params.pHttpVerb; const pHttpVerb = params.pHttpVerb;
const pResource = params.pResource; const pResource = params.pResource;
const pQuery = params.pQuery; const pQuery = params.pQuery;
@ -87,5 +85,3 @@ function createCanonicalRequest(params) {
`${signedHeaders}\n${payloadChecksum}`; `${signedHeaders}\n${payloadChecksum}`;
return canonicalRequest; return canonicalRequest;
} }
module.exports = createCanonicalRequest;
@ -1,16 +1,16 @@
-'use strict'; // eslint-disable-line strict
-const errors = require('../../../lib/errors');
-const constants = require('../../constants');
-const constructStringToSign = require('./constructStringToSign');
-const checkTimeSkew = require('./timeUtils').checkTimeSkew;
-const convertUTCtoISO8601 = require('./timeUtils').convertUTCtoISO8601;
-const convertAmzTimeToMs = require('./timeUtils').convertAmzTimeToMs;
-const extractAuthItems = require('./validateInputs').extractAuthItems;
-const validateCredentials = require('./validateInputs').validateCredentials;
-const areSignedHeadersComplete =
-    require('./validateInputs').areSignedHeadersComplete;
+import errors from '../../../lib/errors';
+import * as constants from '../../constants';
+import constructStringToSign from './constructStringToSign';
+import {
+    checkTimeSkew,
+    convertUTCtoISO8601,
+    convertAmzTimeToMs,
+} from './timeUtils';
+import {
+    extractAuthItems,
+    validateCredentials,
+    areSignedHeadersComplete,
+} from './validateInputs';
/** /**
* V4 header auth check * V4 header auth check
@ -21,7 +21,7 @@ const areSignedHeadersComplete =
* @param {string} awsService - Aws service ('iam' or 's3') * @param {string} awsService - Aws service ('iam' or 's3')
* @return {callback} calls callback * @return {callback} calls callback
*/ */
-function check(request, log, data, awsService) {
+export function check(request, log, data, awsService) {
log.trace('running header auth check'); log.trace('running header auth check');
const token = request.headers['x-amz-security-token']; const token = request.headers['x-amz-security-token'];
@ -51,8 +51,9 @@ function check(request, log, data, awsService) {
if (payloadChecksum === 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD') { if (payloadChecksum === 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD') {
log.trace('requesting streaming v4 auth'); log.trace('requesting streaming v4 auth');
if (request.method !== 'PUT') { if (request.method !== 'PUT') {
log.debug('streaming v4 auth for put only', log.debug('streaming v4 auth for put only', {
{ method: 'auth/v4/headerAuthCheck.check' }); method: 'auth/v4/headerAuthCheck.check',
});
return { err: errors.InvalidArgument }; return { err: errors.InvalidArgument };
} }
if (!request.headers['x-amz-decoded-content-length']) { if (!request.headers['x-amz-decoded-content-length']) {
@ -77,9 +78,12 @@ function check(request, log, data, awsService) {
if (xAmzDate) { if (xAmzDate) {
const xAmzDateArr = xAmzDate.split('T'); const xAmzDateArr = xAmzDate.split('T');
// check that x-amz- date has the correct format and after epochTime // check that x-amz- date has the correct format and after epochTime
if (xAmzDateArr.length === 2 && xAmzDateArr[0].length === 8 if (
&& xAmzDateArr[1].length === 7 xAmzDateArr.length === 2 &&
&& Number.parseInt(xAmzDateArr[0], 10) > 19700101) { xAmzDateArr[0].length === 8 &&
xAmzDateArr[1].length === 7 &&
Number.parseInt(xAmzDateArr[0], 10) > 19700101
) {
// format of x-amz- date is ISO 8601: YYYYMMDDTHHMMSSZ // format of x-amz- date is ISO 8601: YYYYMMDDTHHMMSSZ
timestamp = request.headers['x-amz-date']; timestamp = request.headers['x-amz-date'];
} }
@ -87,18 +91,27 @@ function check(request, log, data, awsService) {
timestamp = convertUTCtoISO8601(request.headers.date); timestamp = convertUTCtoISO8601(request.headers.date);
} }
if (!timestamp) { if (!timestamp) {
log.debug('missing or invalid date header', log.debug('missing or invalid date header', {
{ method: 'auth/v4/headerAuthCheck.check' }); method: 'auth/v4/headerAuthCheck.check',
return { err: errors.AccessDenied. });
customizeDescription('Authentication requires a valid Date or ' + return {
'x-amz-date header') }; err: errors.AccessDenied.customizeDescription(
'Authentication requires a valid Date or ' + 'x-amz-date header'
),
};
} }
const validationResult = validateCredentials(credentialsArr, timestamp, const validationResult = validateCredentials(
log); credentialsArr,
timestamp,
log
);
if (validationResult instanceof Error) { if (validationResult instanceof Error) {
log.debug('credentials in improper format', { credentialsArr, log.debug('credentials in improper format', {
timestamp, validationResult }); credentialsArr,
timestamp,
validationResult,
});
return { err: validationResult }; return { err: validationResult };
} }
// credentialsArr is [accessKey, date, region, aws-service, aws4_request] // credentialsArr is [accessKey, date, region, aws-service, aws4_request]
@ -121,7 +134,7 @@ function check(request, log, data, awsService) {
// expiry is as set out in the policy. // expiry is as set out in the policy.
// 15 minutes in seconds // 15 minutes in seconds
const expiry = (15 * 60); const expiry = 15 * 60;
const isTimeSkewed = checkTimeSkew(timestamp, expiry, log); const isTimeSkewed = checkTimeSkew(timestamp, expiry, log);
if (isTimeSkewed) { if (isTimeSkewed) {
return { err: errors.RequestTimeTooSkewed }; return { err: errors.RequestTimeTooSkewed };
@ -133,8 +146,11 @@ function check(request, log, data, awsService) {
proxyPath = decodeURIComponent(request.headers.proxy_path); proxyPath = decodeURIComponent(request.headers.proxy_path);
} catch (err) { } catch (err) {
log.debug('invalid proxy_path header', { proxyPath, err }); log.debug('invalid proxy_path header', { proxyPath, err });
return { err: errors.InvalidArgument.customizeDescription( return {
'invalid proxy_path header') }; err: errors.InvalidArgument.customizeDescription(
'invalid proxy_path header'
),
};
} }
} }
@ -154,7 +170,6 @@ function check(request, log, data, awsService) {
return { err: stringToSign }; return { err: stringToSign };
} }
return { return {
err: null, err: null,
params: { params: {
@ -178,5 +193,3 @@ function check(request, log, data, awsService) {
}, },
}; };
} }
module.exports = { check };
@ -1,15 +1,10 @@
-'use strict'; // eslint-disable-line strict
-const constants = require('../../constants');
-const errors = require('../../errors');
-const constructStringToSign = require('./constructStringToSign');
-const checkTimeSkew = require('./timeUtils').checkTimeSkew;
-const convertAmzTimeToMs = require('./timeUtils').convertAmzTimeToMs;
-const validateCredentials = require('./validateInputs').validateCredentials;
-const extractQueryParams = require('./validateInputs').extractQueryParams;
-const areSignedHeadersComplete =
-    require('./validateInputs').areSignedHeadersComplete;
+import * as constants from '../../constants';
+import errors from '../../errors';
+import constructStringToSign from './constructStringToSign';
+import { checkTimeSkew, convertAmzTimeToMs } from './timeUtils';
+import { validateCredentials, extractQueryParams } from './validateInputs';
+import { areSignedHeadersComplete } from './validateInputs';
/** /**
* V4 query auth check * V4 query auth check
@ -18,7 +13,7 @@ const areSignedHeadersComplete =
* @param {object} data - Contain authentification params (GET or POST data) * @param {object} data - Contain authentification params (GET or POST data)
* @return {callback} calls callback * @return {callback} calls callback
*/ */
-function check(request, log, data) {
+export function check(request, log, data) {
const authParams = extractQueryParams(data, log); const authParams = extractQueryParams(data, log);
if (Object.keys(authParams).length !== 5) { if (Object.keys(authParams).length !== 5) {
@ -44,11 +39,13 @@ function check(request, log, data) {
return { err: errors.AccessDenied }; return { err: errors.AccessDenied };
} }
const validationResult = validateCredentials(credential, timestamp, const validationResult = validateCredentials(credential, timestamp, log);
log);
if (validationResult instanceof Error) { if (validationResult instanceof Error) {
log.debug('credentials in improper format', { credential, log.debug('credentials in improper format', {
timestamp, validationResult }); credential,
timestamp,
validationResult,
});
return { err: validationResult }; return { err: validationResult };
} }
const accessKey = credential[0]; const accessKey = credential[0];
@ -68,8 +65,11 @@ function check(request, log, data) {
proxyPath = decodeURIComponent(request.headers.proxy_path); proxyPath = decodeURIComponent(request.headers.proxy_path);
} catch (err) { } catch (err) {
log.debug('invalid proxy_path header', { proxyPath }); log.debug('invalid proxy_path header', { proxyPath });
return { err: errors.InvalidArgument.customizeDescription( return {
'invalid proxy_path header') }; err: errors.InvalidArgument.customizeDescription(
'invalid proxy_path header'
),
};
} }
} }
@ -95,8 +95,7 @@ function check(request, log, data) {
signedHeaders, signedHeaders,
payloadChecksum, payloadChecksum,
timestamp, timestamp,
credentialScope: credentialScope: `${scopeDate}/${region}/${service}/${requestType}`,
`${scopeDate}/${region}/${service}/${requestType}`,
awsService: service, awsService: service,
proxyPath, proxyPath,
}); });
@ -122,5 +121,3 @@ function check(request, log, data) {
}, },
}; };
} }
module.exports = { check };
@ -1,15 +1,31 @@
-const { Transform } = require('stream');
-const async = require('async');
-const errors = require('../../../errors');
-const constructChunkStringToSign = require('./constructChunkStringToSign');
+import { Transform } from 'stream';
+import async from 'async';
+import errors from '../../../errors';
+import constructChunkStringToSign from './constructChunkStringToSign';
/** /**
* This class is designed to handle the chunks sent in a streaming * This class is designed to handle the chunks sent in a streaming
* v4 Auth request * v4 Auth request
*/ */
-class V4Transform extends Transform {
+export default class V4Transform extends Transform {
+    log;
+    cb;
+    accessKey;
+    region;
+    scopeDate;
+    timestamp;
+    credentialScope;
+    lastSignature;
+    currentSignature;
+    haveMetadata;
+    seekingDataSize;
+    currentData;
+    dataCursor;
+    currentMetadata;
+    lastPieceDone;
+    lastChunk;
+    vault;
/** /**
* @constructor * @constructor
* @param {object} streamingV4Params - info for chunk authentication * @param {object} streamingV4Params - info for chunk authentication
@ -28,8 +44,14 @@ class V4Transform extends Transform {
* @param {function} cb - callback to api * @param {function} cb - callback to api
*/ */
constructor(streamingV4Params, vault, log, cb) { constructor(streamingV4Params, vault, log, cb) {
const { accessKey, signatureFromRequest, region, scopeDate, timestamp, const {
credentialScope } = streamingV4Params; accessKey,
signatureFromRequest,
region,
scopeDate,
timestamp,
credentialScope,
} = streamingV4Params;
super({}); super({});
this.log = log; this.log = log;
this.cb = cb; this.cb = cb;
@ -79,28 +101,30 @@ class V4Transform extends Transform {
this.currentMetadata.push(remainingPlusStoredMetadata); this.currentMetadata.push(remainingPlusStoredMetadata);
return { completeMetadata: false }; return { completeMetadata: false };
} }
let fullMetadata = remainingPlusStoredMetadata.slice(0, let fullMetadata = remainingPlusStoredMetadata.slice(0, lineBreakIndex);
lineBreakIndex);
// handle extra line break on end of data chunk // handle extra line break on end of data chunk
if (fullMetadata.length === 0) { if (fullMetadata.length === 0) {
const chunkWithoutLeadingLineBreak = remainingPlusStoredMetadata const chunkWithoutLeadingLineBreak =
.slice(2); remainingPlusStoredMetadata.slice(2);
// find second line break // find second line break
lineBreakIndex = chunkWithoutLeadingLineBreak.indexOf('\r\n'); lineBreakIndex = chunkWithoutLeadingLineBreak.indexOf('\r\n');
if (lineBreakIndex < 0) { if (lineBreakIndex < 0) {
this.currentMetadata.push(chunkWithoutLeadingLineBreak); this.currentMetadata.push(chunkWithoutLeadingLineBreak);
return { completeMetadata: false }; return { completeMetadata: false };
} }
fullMetadata = chunkWithoutLeadingLineBreak.slice(0, fullMetadata = chunkWithoutLeadingLineBreak.slice(
lineBreakIndex); 0,
lineBreakIndex
);
} }
const splitMeta = fullMetadata.toString().split(';'); const splitMeta = fullMetadata.toString().split(';');
this.log.trace('parsed full metadata for chunk', { splitMeta }); this.log.trace('parsed full metadata for chunk', { splitMeta });
if (splitMeta.length !== 2) { if (splitMeta.length !== 2) {
this.log.trace('chunk body did not contain correct ' + this.log.trace(
'metadata format'); 'chunk body did not contain correct ' + 'metadata format'
);
return { err: errors.InvalidArgument }; return { err: errors.InvalidArgument };
} }
let dataSize = splitMeta[0]; let dataSize = splitMeta[0];
@ -132,8 +156,9 @@ class V4Transform extends Transform {
completeMetadata: true, completeMetadata: true,
// start slice at lineBreak plus 2 to remove line break at end of // start slice at lineBreak plus 2 to remove line break at end of
// metadata piece since length of '\r\n' is 2 // metadata piece since length of '\r\n' is 2
unparsedChunk: remainingPlusStoredMetadata unparsedChunk: remainingPlusStoredMetadata.slice(
.slice(lineBreakIndex + 2), lineBreakIndex + 2
),
}; };
} }
@ -146,10 +171,13 @@ class V4Transform extends Transform {
*/ */
_authenticate(dataToSend, done) { _authenticate(dataToSend, done) {
// use prior sig to construct new string to sign // use prior sig to construct new string to sign
const stringToSign = constructChunkStringToSign(this.timestamp, const stringToSign = constructChunkStringToSign(
this.credentialScope, this.lastSignature, dataToSend); this.timestamp,
this.log.trace('constructed chunk string to sign', this.credentialScope,
{ stringToSign }); this.lastSignature,
dataToSend
);
this.log.trace('constructed chunk string to sign', { stringToSign });
// once used prior sig to construct string to sign, reassign // once used prior sig to construct string to sign, reassign
// lastSignature to current signature // lastSignature to current signature
this.lastSignature = this.currentSignature; this.lastSignature = this.currentSignature;
@ -165,17 +193,18 @@ class V4Transform extends Transform {
credentialScope: this.credentialScope, credentialScope: this.credentialScope,
}, },
}; };
return this.vault.authenticateV4Request(vaultParams, null, err => { return this.vault.authenticateV4Request(vaultParams, null, (err) => {
if (err) { if (err) {
this.log.trace('err from vault on streaming v4 auth', this.log.trace('err from vault on streaming v4 auth', {
{ error: err, paramsSentToVault: vaultParams.data }); error: err,
paramsSentToVault: vaultParams.data,
});
return done(err); return done(err);
} }
return done(); return done();
}); });
} }
/** /**
* This function will parse the chunk into metadata and data, * This function will parse the chunk into metadata and data,
* use the metadata to authenticate with vault and send the * use the metadata to authenticate with vault and send the
@ -195,9 +224,10 @@ class V4Transform extends Transform {
if (this.lastPieceDone) { if (this.lastPieceDone) {
const slice = chunk.slice(0, 10); const slice = chunk.slice(0, 10);
this.log.trace('received chunk after end.' + this.log.trace(
'See first 10 bytes of chunk', 'received chunk after end.' + 'See first 10 bytes of chunk',
{ chunk: slice.toString() }); { chunk: slice.toString() }
);
return callback(); return callback();
} }
let unparsedChunk = chunk; let unparsedChunk = chunk;
@ -206,10 +236,11 @@ class V4Transform extends Transform {
// test function // test function
() => chunkLeftToEvaluate, () => chunkLeftToEvaluate,
// async function // async function
done => { (done) => {
if (!this.haveMetadata) { if (!this.haveMetadata) {
this.log.trace('do not have metadata so calling ' + this.log.trace(
'_parseMetadata'); 'do not have metadata so calling ' + '_parseMetadata'
);
// need to parse our metadata // need to parse our metadata
const parsedMetadataResults = const parsedMetadataResults =
this._parseMetadata(unparsedChunk); this._parseMetadata(unparsedChunk);
@ -227,7 +258,7 @@ class V4Transform extends Transform {
} }
if (this.lastChunk) { if (this.lastChunk) {
this.log.trace('authenticating final chunk with no data'); this.log.trace('authenticating final chunk with no data');
return this._authenticate(null, err => { return this._authenticate(null, (err) => {
if (err) { if (err) {
return done(err); return done(err);
} }
@ -246,17 +277,18 @@ class V4Transform extends Transform {
} }
// parse just the next data piece without \r\n at the end // parse just the next data piece without \r\n at the end
// (therefore, minus 2) // (therefore, minus 2)
const nextDataPiece = const nextDataPiece = unparsedChunk.slice(
unparsedChunk.slice(0, this.seekingDataSize - 2); 0,
this.seekingDataSize - 2
);
// add parsed data piece to other currentData pieces // add parsed data piece to other currentData pieces
// so that this.currentData is the full data piece // so that this.currentData is the full data piece
nextDataPiece.copy(this.currentData, this.dataCursor); nextDataPiece.copy(this.currentData, this.dataCursor);
return this._authenticate(this.currentData, err => { return this._authenticate(this.currentData, (err) => {
if (err) { if (err) {
return done(err); return done(err);
} }
unparsedChunk = unparsedChunk = unparsedChunk.slice(this.seekingDataSize);
unparsedChunk.slice(this.seekingDataSize);
this.push(this.currentData); this.push(this.currentData);
this.haveMetadata = false; this.haveMetadata = false;
this.seekingDataSize = -1; this.seekingDataSize = -1;
@ -267,7 +299,7 @@ class V4Transform extends Transform {
}); });
}, },
// final callback // final callback
err => { (err) => {
if (err) { if (err) {
return this.cb(err); return this.cb(err);
} }
@ -277,5 +309,3 @@ class V4Transform extends Transform {
); );
} }
} }
module.exports = V4Transform;
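A sketch of how the transform is wired; nothing below is part of this changeset (the import path, the fake vault, the logger and the signature values are placeholders). A caller pipes the raw request body through the transform and receives only authenticated data chunks on the other side:

```
import V4Transform from './lib/auth/v4/streamingV4/V4Transform';

// Placeholder collaborators for illustration only.
const log = { trace: console.log, debug: console.log };
const fakeVault = {
    // Accepts every chunk signature; a real vault verifies it.
    authenticateV4Request: (_params: any, _ctx: any, cb: (err?: any) => void) => cb(),
};
const streamingV4Params = {
    accessKey: 'AKIDEXAMPLE',
    signatureFromRequest: 'seed-signature-from-the-request-headers',
    region: 'us-east-1',
    scopeDate: '20220218',
    timestamp: '20220218T170400Z',
    credentialScope: '20220218/us-east-1/s3/aws4_request',
};

const v4Transform = new V4Transform(streamingV4Params, fakeVault, log, (err) => {
    if (err) {
        console.log('streaming auth failed', err);
    }
});

// request is the incoming HTTP stream, dataStore any writable destination:
// request.pipe(v4Transform).pipe(dataStore);
```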
@ -1,6 +1,5 @@
-const crypto = require('crypto');
-const constants = require('../../../constants');
+import * as crypto from 'crypto';
+import * as constants from '../../../constants';
/** /**
* Constructs stringToSign for chunk * Constructs stringToSign for chunk
@ -13,8 +12,12 @@ const constants = require('../../../constants');
* @param {string} justDataChunk - data portion of chunk * @param {string} justDataChunk - data portion of chunk
* @returns {string} stringToSign * @returns {string} stringToSign
*/ */
-function constructChunkStringToSign(timestamp,
-    credentialScope, lastSignature, justDataChunk) {
+export default function constructChunkStringToSign(
+    timestamp: string,
+    credentialScope: string,
+    lastSignature: string,
+    justDataChunk: string
+): string {
let currentChunkHash; let currentChunkHash;
// for last chunk, there will be no data, so use emptyStringHash // for last chunk, there will be no data, so use emptyStringHash
if (!justDataChunk) { if (!justDataChunk) {
@ -22,11 +25,12 @@ function constructChunkStringToSign(timestamp,
} else { } else {
currentChunkHash = crypto.createHash('sha256'); currentChunkHash = crypto.createHash('sha256');
currentChunkHash = currentChunkHash currentChunkHash = currentChunkHash
.update(justDataChunk, 'binary').digest('hex'); .update(justDataChunk, 'binary')
.digest('hex');
} }
return `AWS4-HMAC-SHA256-PAYLOAD\n${timestamp}\n` + return (
`AWS4-HMAC-SHA256-PAYLOAD\n${timestamp}\n` +
`${credentialScope}\n${lastSignature}\n` + `${credentialScope}\n${lastSignature}\n` +
`${constants.emptyStringHash}\n${currentChunkHash}`; `${constants.emptyStringHash}\n${currentChunkHash}`
);
} }
module.exports = constructChunkStringToSign;
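Same idea for the per-chunk string: each chunk is signed over the previous signature plus the hash of its own data (the empty-string hash stands in for the final, empty chunk). A self-contained sketch of the format, assuming `constants.emptyStringHash` is the SHA-256 of the empty string:

```
import * as crypto from 'crypto';

const sha256hex = (data: string) =>
    crypto.createHash('sha256').update(data, 'binary').digest('hex');

// Assumed to match constants.emptyStringHash.
const emptyStringHash = sha256hex('');

// Hypothetical values for illustration.
const timestamp = '20220218T170400Z';
const credentialScope = '20220218/us-east-1/s3/aws4_request';
const lastSignature = 'previous-chunk-signature';
const chunkData = 'hello world';

const chunkStringToSign =
    `AWS4-HMAC-SHA256-PAYLOAD\n${timestamp}\n` +
    `${credentialScope}\n${lastSignature}\n` +
    `${emptyStringHash}\n${sha256hex(chunkData)}`;

console.log(chunkStringToSign);
```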
@ -1,12 +1,10 @@
'use strict'; // eslint-disable-line strict
/** /**
* Convert timestamp to milliseconds since Unix Epoch * Convert timestamp to milliseconds since Unix Epoch
* @param {string} timestamp of ISO8601Timestamp format without * @param {string} timestamp of ISO8601Timestamp format without
* dashes or colons, e.g. 20160202T220410Z * dashes or colons, e.g. 20160202T220410Z
* @return {number} number of milliseconds since Unix Epoch * @return {number} number of milliseconds since Unix Epoch
*/ */
function convertAmzTimeToMs(timestamp) { export function convertAmzTimeToMs(timestamp) {
const arr = timestamp.split(''); const arr = timestamp.split('');
// Convert to YYYY-MM-DDTHH:mm:ss.sssZ // Convert to YYYY-MM-DDTHH:mm:ss.sssZ
const ISO8601time = `${arr.slice(0, 4).join('')}-${arr[4]}${arr[5]}` + const ISO8601time = `${arr.slice(0, 4).join('')}-${arr[4]}${arr[5]}` +
@ -21,7 +19,7 @@ function convertAmzTimeToMs(timestamp) {
* @param {string} timestamp of UTC form: Fri, 10 Feb 2012 21:34:55 GMT * @param {string} timestamp of UTC form: Fri, 10 Feb 2012 21:34:55 GMT
* @return {string} ISO8601 timestamp of form: YYYYMMDDTHHMMSSZ * @return {string} ISO8601 timestamp of form: YYYYMMDDTHHMMSSZ
*/ */
function convertUTCtoISO8601(timestamp) { export function convertUTCtoISO8601(timestamp) {
// convert to ISO string: YYYY-MM-DDTHH:mm:ss.sssZ. // convert to ISO string: YYYY-MM-DDTHH:mm:ss.sssZ.
const converted = new Date(timestamp).toISOString(); const converted = new Date(timestamp).toISOString();
// Remove "-"s and "."s and milliseconds // Remove "-"s and "."s and milliseconds
@ -36,7 +34,7 @@ function convertUTCtoISO8601(timestamp) {
* @param {object} log - log for request * @param {object} log - log for request
* @return {boolean} true if there is a time problem * @return {boolean} true if there is a time problem
*/ */
function checkTimeSkew(timestamp, expiry, log) { export function checkTimeSkew(timestamp, expiry, log) {
const currentTime = Date.now(); const currentTime = Date.now();
const fifteenMinutes = (15 * 60 * 1000); const fifteenMinutes = (15 * 60 * 1000);
const parsedTimestamp = convertAmzTimeToMs(timestamp); const parsedTimestamp = convertAmzTimeToMs(timestamp);
@ -56,5 +54,3 @@ function checkTimeSkew(timestamp, expiry, log) {
} }
return false; return false;
} }
module.exports = { convertAmzTimeToMs, convertUTCtoISO8601, checkTimeSkew };
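A quick sanity check of the two conversions above; they are pure functions, so this runs in isolation (only the import path is assumed):

```
import {
    convertAmzTimeToMs,
    convertUTCtoISO8601,
} from './lib/auth/v4/timeUtils';

// x-amz-date style (ISO 8601 without separators) -> epoch milliseconds
const ms = convertAmzTimeToMs('20160202T220410Z');
console.log(ms === Date.parse('2016-02-02T22:04:10.000Z')); // true

// RFC 1123 Date header -> x-amz-date style
console.log(convertUTCtoISO8601('Fri, 10 Feb 2012 21:34:55 GMT'));
// 20120210T213455Z
```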
@ -1,6 +1,4 @@
-'use strict'; // eslint-disable-line strict
-const errors = require('../../../lib/errors');
+import errors from '../../../lib/errors';
/** /**
* Validate Credentials * Validate Credentials
@ -11,7 +9,7 @@ const errors = require('../../../lib/errors');
* @param {object} log - logging object * @param {object} log - logging object
* @return {boolean} true if credentials are correct format, false if not * @return {boolean} true if credentials are correct format, false if not
*/ */
function validateCredentials(credentials, timestamp, log) { export function validateCredentials(credentials, timestamp, log) {
if (!Array.isArray(credentials) || credentials.length !== 5) { if (!Array.isArray(credentials) || credentials.length !== 5) {
log.warn('credentials in improper format', { credentials }); log.warn('credentials in improper format', { credentials });
return errors.InvalidArgument; return errors.InvalidArgument;
@ -25,32 +23,39 @@ function validateCredentials(credentials, timestamp, log) {
log.warn('accessKey provided is wrong format', { accessKey }); log.warn('accessKey provided is wrong format', { accessKey });
return errors.InvalidArgument; return errors.InvalidArgument;
} }
// The scope date (format YYYYMMDD) must be same date as the timestamp // The scope date (format YYYYMMDD) must be same date as the timestamp
// on the request from the x-amz-date param (if queryAuthCheck) // on the request from the x-amz-date param (if queryAuthCheck)
// or from the x-amz-date header or date header (if headerAuthCheck) // or from the x-amz-date header or date header (if headerAuthCheck)
// Format of timestamp is ISO 8601: YYYYMMDDTHHMMSSZ. // Format of timestamp is ISO 8601: YYYYMMDDTHHMMSSZ.
// http://docs.aws.amazon.com/AmazonS3/latest/API/ // http://docs.aws.amazon.com/AmazonS3/latest/API/
// sigv4-query-string-auth.html // sigv4-query-string-auth.html
// http://docs.aws.amazon.com/general/latest/gr/ // http://docs.aws.amazon.com/general/latest/gr/
// sigv4-date-handling.html // sigv4-date-handling.html
// convert timestamp to format of scopeDate YYYYMMDD // convert timestamp to format of scopeDate YYYYMMDD
const timestampDate = timestamp.split('T')[0]; const timestampDate = timestamp.split('T')[0];
if (scopeDate.length !== 8 || scopeDate !== timestampDate) { if (scopeDate.length !== 8 || scopeDate !== timestampDate) {
log.warn('scope date must be the same date as the timestamp date', log.warn('scope date must be the same date as the timestamp date', {
{ scopeDate, timestampDate }); scopeDate,
timestampDate,
});
return errors.RequestTimeTooSkewed; return errors.RequestTimeTooSkewed;
} }
if (service !== 's3' && service !== 'iam' && service !== 'ring' && if (
service !== 'sts') { service !== 's3' &&
service !== 'iam' &&
service !== 'ring' &&
service !== 'sts'
) {
log.warn('service in credentials is not one of s3/iam/ring/sts', { log.warn('service in credentials is not one of s3/iam/ring/sts', {
service, service,
}); });
return errors.InvalidArgument; return errors.InvalidArgument;
} }
if (requestType !== 'aws4_request') { if (requestType !== 'aws4_request') {
log.warn('requestType contained in params is not aws4_request', log.warn('requestType contained in params is not aws4_request', {
{ requestType }); requestType,
});
return errors.InvalidArgument; return errors.InvalidArgument;
} }
return {}; return {};
@ -62,13 +67,14 @@ function validateCredentials(credentials, timestamp, log) {
* @param {object} log - logging object * @param {object} log - logging object
* @return {object} object containing extracted query params for authV4 * @return {object} object containing extracted query params for authV4
*/ */
function extractQueryParams(queryObj, log) { export function extractQueryParams(queryObj, log) {
const authParams = {}; const authParams = {};
// Do not need the algorithm sent back // Do not need the algorithm sent back
if (queryObj['X-Amz-Algorithm'] !== 'AWS4-HMAC-SHA256') { if (queryObj['X-Amz-Algorithm'] !== 'AWS4-HMAC-SHA256') {
log.warn('algorithm param incorrect', log.warn('algorithm param incorrect', {
{ algo: queryObj['X-Amz-Algorithm'] }); algo: queryObj['X-Amz-Algorithm'],
});
return authParams; return authParams;
} }
@ -81,7 +87,6 @@ function extractQueryParams(queryObj, log) {
return authParams; return authParams;
} }
const signature = queryObj['X-Amz-Signature']; const signature = queryObj['X-Amz-Signature'];
if (signature && signature.length === 64) { if (signature && signature.length === 64) {
authParams.signatureFromRequest = signature; authParams.signatureFromRequest = signature;
@ -94,14 +99,15 @@ function extractQueryParams(queryObj, log) {
if (timestamp && timestamp.length === 16) { if (timestamp && timestamp.length === 16) {
authParams.timestamp = timestamp; authParams.timestamp = timestamp;
} else { } else {
log.warn('missing or invalid timestamp', log.warn('missing or invalid timestamp', {
{ timestamp: queryObj['X-Amz-Date'] }); timestamp: queryObj['X-Amz-Date'],
});
return authParams; return authParams;
} }
const expiry = Number.parseInt(queryObj['X-Amz-Expires'], 10); const expiry = Number.parseInt(queryObj['X-Amz-Expires'], 10);
const sevenDays = 604800; const sevenDays = 604800;
if (expiry && (expiry > 0 && expiry <= sevenDays)) { if (expiry && expiry > 0 && expiry <= sevenDays) {
authParams.expiry = expiry; authParams.expiry = expiry;
} else { } else {
log.warn('invalid expiry', { expiry }); log.warn('invalid expiry', { expiry });
@ -118,17 +124,15 @@ function extractQueryParams(queryObj, log) {
return authParams; return authParams;
} }
/** /**
* Extract and validate components from auth header * Extract and validate components from auth header
* @param {string} authHeader - authorization header from request * @param {string} authHeader - authorization header from request
* @param {object} log - logging object * @param {object} log - logging object
* @return {object} object containing extracted auth header items for authV4 * @return {object} object containing extracted auth header items for authV4
*/ */
function extractAuthItems(authHeader, log) { export function extractAuthItems(authHeader, log) {
const authItems = {}; const authItems = {};
const authArray = authHeader const authArray = authHeader.replace('AWS4-HMAC-SHA256 ', '').split(',');
.replace('AWS4-HMAC-SHA256 ', '').split(',');
if (authArray.length < 3) { if (authArray.length < 3) {
return authItems; return authItems;
@ -138,25 +142,34 @@ function extractAuthItems(authHeader, log) {
const signedHeadersStr = authArray[1]; const signedHeadersStr = authArray[1];
const signatureStr = authArray[2]; const signatureStr = authArray[2];
log.trace('credentials from request', { credentialStr }); log.trace('credentials from request', { credentialStr });
if (credentialStr && credentialStr.trim().startsWith('Credential=') if (
&& credentialStr.indexOf('/') > -1) { credentialStr &&
credentialStr.trim().startsWith('Credential=') &&
credentialStr.indexOf('/') > -1
) {
authItems.credentialsArr = credentialStr authItems.credentialsArr = credentialStr
.trim().replace('Credential=', '').split('/'); .trim()
.replace('Credential=', '')
.split('/');
} else { } else {
log.warn('missing credentials'); log.warn('missing credentials');
} }
log.trace('signed headers from request', { signedHeadersStr }); log.trace('signed headers from request', { signedHeadersStr });
if (signedHeadersStr && signedHeadersStr.trim() if (
.startsWith('SignedHeaders=')) { signedHeadersStr &&
signedHeadersStr.trim().startsWith('SignedHeaders=')
) {
authItems.signedHeaders = signedHeadersStr authItems.signedHeaders = signedHeadersStr
.trim().replace('SignedHeaders=', ''); .trim()
.replace('SignedHeaders=', '');
} else { } else {
log.warn('missing signed headers'); log.warn('missing signed headers');
} }
log.trace('signature from request', { signatureStr }); log.trace('signature from request', { signatureStr });
if (signatureStr && signatureStr.trim().startsWith('Signature=')) { if (signatureStr && signatureStr.trim().startsWith('Signature=')) {
authItems.signatureFromRequest = signatureStr authItems.signatureFromRequest = signatureStr
.trim().replace('Signature=', ''); .trim()
.replace('Signature=', '');
} else { } else {
log.warn('missing signature'); log.warn('missing signature');
} }
@ -170,21 +183,20 @@ function extractAuthItems(authHeader, log) {
* @param {object} allHeaders - request.headers * @param {object} allHeaders - request.headers
* @return {boolean} true if all x-amz-headers included and false if not * @return {boolean} true if all x-amz-headers included and false if not
*/ */
function areSignedHeadersComplete(signedHeaders, allHeaders) { export function areSignedHeadersComplete(signedHeaders, allHeaders) {
const signedHeadersList = signedHeaders.split(';'); const signedHeadersList = signedHeaders.split(';');
if (signedHeadersList.indexOf('host') === -1) { if (signedHeadersList.indexOf('host') === -1) {
return false; return false;
} }
const headers = Object.keys(allHeaders); const headers = Object.keys(allHeaders);
for (let i = 0; i < headers.length; i++) { for (let i = 0; i < headers.length; i++) {
if ((headers[i].startsWith('x-amz-') if (
|| headers[i].startsWith('x-scal-')) (headers[i].startsWith('x-amz-') ||
&& signedHeadersList.indexOf(headers[i]) === -1) { headers[i].startsWith('x-scal-')) &&
signedHeadersList.indexOf(headers[i]) === -1
) {
return false; return false;
} }
} }
return true; return true;
} }
module.exports = { validateCredentials, extractQueryParams,
areSignedHeadersComplete, extractAuthItems };
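To make the parsing above concrete, this is the shape `extractAuthItems` pulls out of a V4 Authorization header. Sketch only: the header value, logger stub and import path are made up:

```
import { extractAuthItems } from './lib/auth/v4/validateInputs';

const log = { trace: console.log, warn: console.log };

const authHeader =
    'AWS4-HMAC-SHA256 ' +
    'Credential=AKIDEXAMPLE/20220218/us-east-1/s3/aws4_request,' +
    ' SignedHeaders=host;x-amz-content-sha256;x-amz-date,' +
    ' Signature=5d672d79c15b13162d9279b0855cfba6789a8edb4c82c400e06b5924a6f2b5d7';

const authItems: any = extractAuthItems(authHeader, log);
console.log(authItems.credentialsArr);
// ['AKIDEXAMPLE', '20220218', 'us-east-1', 's3', 'aws4_request']
console.log(authItems.signedHeaders);
// 'host;x-amz-content-sha256;x-amz-date'
console.log(authItems.signatureFromRequest);
// '5d672d79c15b13162d9279b0855cfba6789a8edb4c82c400e06b5924a6f2b5d7'
```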
@ -103,11 +103,11 @@ module.exports = {
gcpTaggingPrefix: 'aws-tag-', gcpTaggingPrefix: 'aws-tag-',
productName: 'APN/1.0 Scality/1.0 Scality CloudServer for Zenko', productName: 'APN/1.0 Scality/1.0 Scality CloudServer for Zenko',
legacyLocations: ['sproxyd', 'legacy'], legacyLocations: ['sproxyd', 'legacy'],
// healthcheck default call from nginx is every 2 seconds // healthcheck default call from nginx is every 2 seconds
// for external backends, don't call unless at least 1 minute // for external backends, don't call unless at least 1 minute
// (60,000 milliseconds) since last call // (60,000 milliseconds) since last call
externalBackendHealthCheckInterval: 60000, externalBackendHealthCheckInterval: 60000,
// some of the available data backends (if called directly rather // some of the available data backends (if called directly rather
// than through the multiple backend gateway) need a key provided // than through the multiple backend gateway) need a key provided
// as a string as first parameter of the get/delete methods. // as a string as first parameter of the get/delete methods.
clientsRequireStringKey: { sproxyd: true, cdmi: true }, clientsRequireStringKey: { sproxyd: true, cdmi: true },
@ -1,4 +1,4 @@
-'use strict'; // eslint-disable-line strict
+import { LevelDB } from 'level';
const writeOptions = { sync: true }; const writeOptions = { sync: true };
@ -18,7 +18,7 @@ const writeOptions = { sync: true };
* @param {String} message - the Error message. * @param {String} message - the Error message.
* @returns {Error} the Error object. * @returns {Error} the Error object.
*/ */
function propError(propName, message) { function propError(propName: string, message: string): Error {
const err = new Error(message); const err = new Error(message);
err[propName] = true; err[propName] = true;
return err; return err;
@ -27,7 +27,7 @@ function propError(propName, message) {
/** /**
* Running transaction with multiple updates to be committed atomically * Running transaction with multiple updates to be committed atomically
*/ */
-class IndexTransaction {
+export class IndexTransaction {
/** /**
* Builds a new transaction * Builds a new transaction
* *
@ -36,7 +36,7 @@ class IndexTransaction {
* *
* @returns {IndexTransaction} a new empty transaction * @returns {IndexTransaction} a new empty transaction
*/ */
constructor(db) { constructor(db: LevelDB) {
this.operations = []; this.operations = [];
this.db = db; this.db = db;
this.closed = false; this.closed = false;
@ -63,13 +63,17 @@ class IndexTransaction {
*/ */
push(op) { push(op) {
if (this.closed) { if (this.closed) {
throw propError('pushOnCommittedTransaction', throw propError(
'can not add ops to already committed transaction'); 'pushOnCommittedTransaction',
'can not add ops to already committed transaction'
);
} }
if (op.type !== 'put' && op.type !== 'del') { if (op.type !== 'put' && op.type !== 'del') {
throw propError('invalidTransactionVerb', throw propError(
`unknown action type: ${op.type}`); 'invalidTransactionVerb',
`unknown action type: ${op.type}`
);
} }
if (op.key === undefined) { if (op.key === undefined) {
@ -136,14 +140,22 @@ class IndexTransaction {
*/ */
addCondition(condition) { addCondition(condition) {
if (this.closed) { if (this.closed) {
throw propError('pushOnCommittedTransaction', throw propError(
'can not add conditions to already committed transaction'); 'pushOnCommittedTransaction',
'can not add conditions to already committed transaction'
);
} }
if (condition === undefined || Object.keys(condition).length === 0) { if (condition === undefined || Object.keys(condition).length === 0) {
throw propError('missingCondition', 'missing condition for conditional put'); throw propError(
'missingCondition',
'missing condition for conditional put'
);
} }
if (typeof (condition.notExists) !== 'string') { if (typeof condition.notExists !== 'string') {
throw propError('unsupportedConditionalOperation', 'missing key or supported condition'); throw propError(
'unsupportedConditionalOperation',
'missing key or supported condition'
);
} }
this.conditions.push(condition); this.conditions.push(condition);
} }
@ -158,13 +170,21 @@ class IndexTransaction {
*/ */
commit(cb) { commit(cb) {
if (this.closed) { if (this.closed) {
return cb(propError('alreadyCommitted', return cb(
'transaction was already committed')); propError(
'alreadyCommitted',
'transaction was already committed'
)
);
} }
if (this.operations.length === 0) { if (this.operations.length === 0) {
return cb(propError('emptyTransaction', return cb(
'tried to commit an empty transaction')); propError(
'emptyTransaction',
'tried to commit an empty transaction'
)
);
} }
this.closed = true; this.closed = true;
@ -176,7 +196,3 @@ class IndexTransaction {
return this.db.batch(this.operations, writeOptions, cb); return this.db.batch(this.operations, writeOptions, cb);
} }
} }
module.exports = {
IndexTransaction,
};
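Usage of the transaction object is unchanged; only the export moves to an ES named export. A sketch with a stubbed LevelDB-like `db` (the stub and the import path are not part of Arsenal):

```
import { IndexTransaction } from './lib/versioning/IndexTransaction';

// Minimal stand-in for a LevelDB handle: just enough for commit().
const fakeDb: any = {
    batch: (ops: any[], _options: object, cb: (err?: Error) => void) => {
        console.log('batched operations:', ops);
        cb();
    },
};

const transaction = new IndexTransaction(fakeDb);
transaction.push({ type: 'put', key: 'foo', value: 'bar' });
transaction.push({ type: 'del', key: 'stale-key' });

transaction.commit((err: any) => {
    if (err) {
        // e.g. err.emptyTransaction or err.alreadyCommitted
        console.log('commit refused:', err);
        return;
    }
    console.log('both operations applied atomically');
});
```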
@ -1,4 +1,4 @@
-function reshapeExceptionError(error) {
+export function reshapeExceptionError(error) {
const { message, code, stack, name } = error; const { message, code, stack, name } = error;
return { return {
message, message,
@ -7,7 +7,3 @@ function reshapeExceptionError(error) {
name, name,
}; };
} }
module.exports = {
reshapeExceptionError,
};
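For reference, the helper simply copies the interesting fields off a thrown exception so it can be logged as a plain object. Sketch only; the import path is assumed:

```
import { reshapeExceptionError } from './lib/errorUtils';

try {
    JSON.parse('{not json');
} catch (err) {
    // { message, code, stack, name } — safe to hand to a JSON logger
    console.log(reshapeExceptionError(err as Error));
}
```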
@ -1,11 +1,14 @@
-'use strict'; // eslint-disable-line strict
+import errorsObj from '../errors/arsenalErrors.json';
/** /**
* ArsenalError * ArsenalError
* *
* @extends {Error} * @extends {Error}
*/ */
-class ArsenalError extends Error {
+export class ArsenalError extends Error {
+    code: number
+    description: string
/** /**
* constructor. * constructor.
* *
@ -13,7 +16,7 @@ class ArsenalError extends Error {
* @param {number} code - HTTP status code * @param {number} code - HTTP status code
* @param {string} desc - Verbose description of error * @param {string} desc - Verbose description of error
*/ */
constructor(type, code, desc) { constructor(type: string, code: number, desc: string) {
super(type); super(type);
/** /**
@ -65,23 +68,12 @@ class ArsenalError extends Error {
} }
} }
-/**
- * Generate an Errors instances object.
- *
- * @returns {Object.<string, ArsenalError>} - object field by arsenalError
- * instances
- */
-function errorsGen() {
-    const errors = {};
-    const errorsObj = require('../errors/arsenalErrors.json');
-    Object.keys(errorsObj)
-        .filter(index => index !== '_comment')
-        .forEach(index => {
-            errors[index] = new ArsenalError(index, errorsObj[index].code,
-                errorsObj[index].description);
-        });
-    return errors;
-}
-module.exports = errorsGen();
+const errors: { [key: string]: ArsenalError } = {};
+Object.keys(errorsObj)
+    .filter((index) => index !== '_comment')
+    .forEach((index) => {
+        const { code, description } = errorsObj[index];
+        errors[index] = new ArsenalError(index, code, description);
+    });
+export default errors;
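The behaviour of the generated error map is preserved: each entry is an `ArsenalError` (hence an `Error`) carrying the HTTP `code` and `description` from `arsenalErrors.json`, and `customizeDescription` still yields an error with a more specific message. A usage sketch (import path assumed):

```
import errors from './lib/errors';

const err = errors.AccessDenied;
console.log(err instanceof Error);  // true
console.log(err.code);              // 403
console.log(err.description);

const custom = errors.AccessDenied.customizeDescription(
    'Authentication requires a valid Date or x-amz-date header'
);
console.log(custom.code, custom.description);
```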
@ -17,9 +17,9 @@ describe('decyrptSecret', () => {
describe('parseServiceCredentials', () => { describe('parseServiceCredentials', () => {
const conf = { const conf = {
users: [{ accessKey, users: [{ accessKey,
accountType: 'service-clueso', accountType: 'service-clueso',
secretKey, secretKey,
userName: 'Search Service Account' }], userName: 'Search Service Account' }],
}; };
const auth = JSON.stringify({ privateKey }); const auth = JSON.stringify({ privateKey });
@ -1,6 +1,4 @@
-'use strict'; // eslint-disable-line strict
-const ciphers = [
+export const ciphers = [
'DHE-RSA-AES128-GCM-SHA256', 'DHE-RSA-AES128-GCM-SHA256',
'ECDHE-ECDSA-AES128-GCM-SHA256', 'ECDHE-ECDSA-AES128-GCM-SHA256',
'ECDHE-RSA-AES256-GCM-SHA384', 'ECDHE-RSA-AES256-GCM-SHA384',
@ -28,7 +26,3 @@ const ciphers = [
'!EDH-RSA-DES-CBC3-SHA', '!EDH-RSA-DES-CBC3-SHA',
'!KRB5-DES-CBC3-SHA', '!KRB5-DES-CBC3-SHA',
].join(':'); ].join(':');
module.exports = {
ciphers,
};
@ -29,16 +29,11 @@ c2CNfUEqyRbJF4pE9ZcdQReT5p/llmyhQdvq6cHH+cKJk63C6DHRVoStsnsUcvKe
bLxKsygK77ttjr61cxLoDJeGd5L5h1CPmwIBAg== bLxKsygK77ttjr61cxLoDJeGd5L5h1CPmwIBAg==
-----END DH PARAMETERS----- -----END DH PARAMETERS-----
*/ */
-'use strict'; // eslint-disable-line strict
-const dhparam =
+export const dhparam =
'MIIBCAKCAQEAh99T77KGNuiY9N6xrCJ3QNv4SFADTa3CD+1VMTAdRJLHUNpglB+i' + 'MIIBCAKCAQEAh99T77KGNuiY9N6xrCJ3QNv4SFADTa3CD+1VMTAdRJLHUNpglB+i' +
'AoTYiLDFZgtTCpx0ZZUD+JM3qiCZy0OK5/ZGlVD7sZmxjRtdpVK4qIPtwav8t0J7' + 'AoTYiLDFZgtTCpx0ZZUD+JM3qiCZy0OK5/ZGlVD7sZmxjRtdpVK4qIPtwav8t0J7' +
'c2CNfUEqyRbJF4pE9ZcdQReT5p/llmyhQdvq6cHH+cKJk63C6DHRVoStsnsUcvKe' + 'c2CNfUEqyRbJF4pE9ZcdQReT5p/llmyhQdvq6cHH+cKJk63C6DHRVoStsnsUcvKe' +
'23PLGZulKg8H3eRBxHamHkmyuEVDtoNhMIoJONsdXSpo5GgcD4EQMM8xb/qsnCxn' + '23PLGZulKg8H3eRBxHamHkmyuEVDtoNhMIoJONsdXSpo5GgcD4EQMM8xb/qsnCxn' +
'6QIGTBvcHskxtlTZOfUPk4XQ6Yb3tQi2TurzkQHLln4U7p/GZs+D+6D3SgSPqr6P' + '6QIGTBvcHskxtlTZOfUPk4XQ6Yb3tQi2TurzkQHLln4U7p/GZs+D+6D3SgSPqr6P' +
'bLxKsygK77ttjr61cxLoDJeGd5L5h1CPmwIBAg=='; 'bLxKsygK77ttjr61cxLoDJeGd5L5h1CPmwIBAg==';
module.exports = {
dhparam,
};
lib/https/index.ts (new file, 2 lines)
@ -0,0 +1,2 @@
export * as ciphers from './ciphers'
export * as dhparam from './dh2048'
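These two values exist to be dropped into Node's TLS options. A sketch of the intended wiring; the key/cert paths and server code are illustrative, not part of Arsenal, and the imports go straight to the two modules rather than through the new index (whose `export * as` re-exports expose namespace objects, i.e. `ciphers.ciphers`):

```
import * as https from 'https';
import * as fs from 'fs';
import { ciphers } from './lib/https/ciphers';
import { dhparam } from './lib/https/dh2048';

// Hypothetical certificate material.
const options: https.ServerOptions = {
    key: fs.readFileSync('./server.key'),
    cert: fs.readFileSync('./server.crt'),
    ciphers,
    dhparam,
    honorCipherOrder: true,
};

https.createServer(options, (req, res) => {
    res.end('ok\n');
}).listen(8443);
```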
@ -1,6 +1,4 @@
-'use strict'; // eslint-disable-line strict
-const ipaddr = require('ipaddr.js');
+import ipaddr from 'ipaddr.js';
/** /**
* checkIPinRangeOrMatch checks whether a given ip address is in an ip address * checkIPinRangeOrMatch checks whether a given ip address is in an ip address
@ -9,7 +7,7 @@ const ipaddr = require('ipaddr.js');
* @param {object} ip - parsed ip address * @param {object} ip - parsed ip address
* @return {boolean} true if in range, false if not * @return {boolean} true if in range, false if not
*/ */
function checkIPinRangeOrMatch(cidr, ip) { export function checkIPinRangeOrMatch(cidr, ip) {
// If there is an exact match of the ip address, no need to check ranges // If there is an exact match of the ip address, no need to check ranges
if (ip.toString() === cidr) { if (ip.toString() === cidr) {
return true; return true;
@ -39,7 +37,7 @@ function checkIPinRangeOrMatch(cidr, ip) {
* @param {string} ip - IPV4/IPV6/IPV4-mapped IPV6 address * @param {string} ip - IPV4/IPV6/IPV4-mapped IPV6 address
* @return {object} parsedIp - Object representation of parsed IP * @return {object} parsedIp - Object representation of parsed IP
*/ */
function parseIp(ip) { export function parseIp(ip) {
if (ipaddr.IPv4.isValid(ip)) { if (ipaddr.IPv4.isValid(ip)) {
return ipaddr.parse(ip); return ipaddr.parse(ip);
} }
@ -60,7 +58,7 @@ function parseIp(ip) {
* @param {string} ip - IP address * @param {string} ip - IP address
* @return {boolean} - true if there is match or false for no match * @return {boolean} - true if there is match or false for no match
*/ */
function ipMatchCidrList(cidrList, ip) { export function ipMatchCidrList(cidrList, ip) {
const parsedIp = parseIp(ip); const parsedIp = parseIp(ip);
return cidrList.some(item => { return cidrList.some(item => {
let cidr; let cidr;
@ -75,9 +73,3 @@ function ipMatchCidrList(cidrList, ip) {
return checkIPinRangeOrMatch(cidr || item, parsedIp); return checkIPinRangeOrMatch(cidr || item, parsedIp);
}); });
} }
module.exports = {
checkIPinRangeOrMatch,
ipMatchCidrList,
parseIp,
};
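Example of the matching helper (import path assumed); both exact addresses and CIDR ranges in the list are accepted:

```
import { ipMatchCidrList } from './lib/ipCheck';

// CIDR range match
console.log(ipMatchCidrList(['192.168.1.0/24'], '192.168.1.42')); // true
// Exact match, no range needed
console.log(ipMatchCidrList(['10.0.0.1'], '10.0.0.1'));           // true
// No match
console.log(ipMatchCidrList(['192.168.1.0/24'], '10.0.0.1'));     // false
```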
@ -1,6 +1,5 @@
-'use strict'; // eslint-disable-line
-const debug = require('util').debuglog('jsutil');
+import { debuglog } from 'util';
+const debug = debuglog('jsutil');
// JavaScript utility functions // JavaScript utility functions
@ -17,7 +16,7 @@ const debug = require('util').debuglog('jsutil');
* @return {function} a callable wrapper mirroring <tt>func</tt> but * @return {function} a callable wrapper mirroring <tt>func</tt> but
* only calls <tt>func</tt> at first invocation. * only calls <tt>func</tt> at first invocation.
*/ */
-module.exports.once = function once(func) {
+export function once(func) {
const state = { called: false, res: undefined }; const state = { called: false, res: undefined };
return function wrapper(...args) { return function wrapper(...args) {
if (!state.called) { if (!state.called) {
@ -25,7 +24,7 @@ module.exports.once = function once(func) {
state.res = func.apply(func, args); state.res = func.apply(func, args);
} else { } else {
debug('function already called:', func, debug('function already called:', func,
'returning cached result:', state.res); 'returning cached result:', state.res);
} }
return state.res; return state.res;
}; };
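The semantics of `once` are unchanged: the wrapped function runs a single time and every later call gets the cached result (plus a debug log). Quick sketch (import path assumed):

```
import { once } from './lib/jsutil';

let calls = 0;
const initialize = once((name: string) => {
    calls += 1;
    return `initialized ${name}`;
});

console.log(initialize('db'));   // initialized db
console.log(initialize('db'));   // initialized db (cached, not re-run)
console.log(calls);              // 1
```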
@ -1,17 +1,19 @@
-const Redis = require('ioredis');
-class RedisClient {
+import Redis from 'ioredis';
+export default class RedisClient {
+    _client: Redis
/** /**
* @constructor * @constructor
* @param {Object} config - config * @param {Object} config - config
* @param {string} config.host - Redis host * @param {string} config.host - Redis host
* @param {number} config.port - Redis port * @param {number} config.port - Redis port
* @param {string} config.password - Redis password * @param {string} config.password - Redis password
* @param {werelogs.Logger} logger - logger instance * @param {werelogs.Logger} logger - logger instance
*/ */
constructor(config, logger) { constructor(config, logger) {
this._client = new Redis(config); this._client = new Redis(config);
this._client.on('error', err => this._client.on('error', (err) =>
logger.trace('error from redis', { logger.trace('error from redis', {
error: err, error: err,
method: 'RedisClient.constructor', method: 'RedisClient.constructor',
@ -23,18 +25,18 @@ class RedisClient {
} }
/** /**
* scan a pattern and return matching keys * scan a pattern and return matching keys
* @param {string} pattern - string pattern to match with all existing keys * @param {string} pattern - string pattern to match with all existing keys
* @param {number} [count=10] - scan count * @param {number} [count=10] - scan count
* @param {callback} cb - callback (error, result) * @param {callback} cb - callback (error, result)
* @return {undefined} * @return {undefined}
*/ */
scan(pattern, count = 10, cb) { scan(pattern: string, count = 10, cb) {
const params = { match: pattern, count }; const params = { match: pattern, count };
const keys = []; const keys = [];
const stream = this._client.scanStream(params); const stream = this._client.scanStream(params);
stream.on('data', resultKeys => { stream.on('data', (resultKeys) => {
for (let i = 0; i < resultKeys.length; i++) { for (let i = 0; i < resultKeys.length; i++) {
keys.push(resultKeys[i]); keys.push(resultKeys[i]);
} }
@ -45,15 +47,18 @@ class RedisClient {
} }
/** /**
* increment value of a key by 1 and set a ttl * increment value of a key by 1 and set a ttl
* @param {string} key - key holding the value * @param {string} key - key holding the value
* @param {number} expiry - expiry in seconds * @param {number} expiry - expiry in seconds
* @param {callback} cb - callback * @param {callback} cb - callback
* @return {undefined} * @return {undefined}
*/ */
incrEx(key, expiry, cb) { incrEx(key: string, expiry: number, cb) {
return this._client return this._client
.multi([['incr', key], ['expire', key, expiry]]) .multi([
['incr', key],
['expire', key, expiry],
])
.exec(cb); .exec(cb);
} }
@ -64,7 +69,7 @@ class RedisClient {
* @param {callback} cb - callback * @param {callback} cb - callback
* @return {undefined} * @return {undefined}
*/ */
incrby(key, amount, cb) { incrby(key: string, amount: number, cb) {
return this._client.incrby(key, amount, cb); return this._client.incrby(key, amount, cb);
} }
@ -76,9 +81,12 @@ class RedisClient {
* @param {callback} cb - callback * @param {callback} cb - callback
* @return {undefined} * @return {undefined}
*/ */
incrbyEx(key, amount, expiry, cb) { incrbyEx(key: string, amount: number, expiry: number, cb) {
return this._client return this._client
.multi([['incrby', key, amount], ['expire', key, expiry]]) .multi([
['incrby', key, amount],
['expire', key, expiry],
])
.exec(cb); .exec(cb);
} }
@ -89,7 +97,7 @@ class RedisClient {
* @param {callback} cb - callback * @param {callback} cb - callback
* @return {undefined} * @return {undefined}
*/ */
decrby(key, amount, cb) { decrby(key: string, amount: number, cb) {
return this._client.decrby(key, amount, cb); return this._client.decrby(key, amount, cb);
} }
@ -99,7 +107,7 @@ class RedisClient {
* @param {callback} cb - callback * @param {callback} cb - callback
* @return {undefined} * @return {undefined}
*/ */
get(key, cb) { get(key: string, cb) {
return this._client.get(key, cb); return this._client.get(key, cb);
} }
@ -111,17 +119,17 @@ class RedisClient {
* If cb response returns 1, key exists. * If cb response returns 1, key exists.
* @return {undefined} * @return {undefined}
*/ */
exists(key, cb) { exists(key: string, cb) {
return this._client.exists(key, cb); return this._client.exists(key, cb);
} }
/** /**
* execute a batch of commands * execute a batch of commands
* @param {string[]} cmds - list of commands * @param {string[]} cmds - list of commands
* @param {callback} cb - callback * @param {callback} cb - callback
* @return {undefined} * @return {undefined}
*/ */
batch(cmds, cb) { batch(cmds: string[], cb) {
return this._client.pipeline(cmds).exec(cb); return this._client.pipeline(cmds).exec(cb);
} }
@ -134,7 +142,7 @@ class RedisClient {
* @param {callback} cb - callback * @param {callback} cb - callback
* @return {undefined} * @return {undefined}
*/ */
zadd(key, score, value, cb) { zadd(key: string, score: number, value: string, cb) {
return this._client.zadd(key, score, value, cb); return this._client.zadd(key, score, value, cb);
} }
@ -147,7 +155,7 @@ class RedisClient {
* @param {function} cb - callback * @param {function} cb - callback
* @return {undefined} * @return {undefined}
*/ */
zcard(key, cb) { zcard(key: string, cb) {
return this._client.zcard(key, cb); return this._client.zcard(key, cb);
} }
@ -161,7 +169,7 @@ class RedisClient {
* @param {function} cb - callback * @param {function} cb - callback
* @return {undefined} * @return {undefined}
*/ */
zscore(key, value, cb) { zscore(key: string, value: string, cb) {
return this._client.zscore(key, value, cb); return this._client.zscore(key, value, cb);
} }
@ -174,7 +182,7 @@ class RedisClient {
* The cb response returns number of values removed * The cb response returns number of values removed
* @return {undefined} * @return {undefined}
*/ */
zrem(key, value, cb) { zrem(key: string, value: string | any[], cb) {
return this._client.zrem(key, value, cb); return this._client.zrem(key, value, cb);
} }
@ -186,7 +194,7 @@ class RedisClient {
* @param {function} cb - callback * @param {function} cb - callback
* @return {undefined} * @return {undefined}
*/ */
zrange(key, start, end, cb) { zrange(key: string, start: number, end: number, cb) {
return this._client.zrange(key, start, end, cb); return this._client.zrange(key, start, end, cb);
} }
@ -200,7 +208,7 @@ class RedisClient {
* @param {function} cb - callback * @param {function} cb - callback
* @return {undefined} * @return {undefined}
*/ */
zrangebyscore(key, min, max, cb) { zrangebyscore(key: string, min: number | string, max: number | string, cb) {
return this._client.zrangebyscore(key, min, max, cb); return this._client.zrangebyscore(key, min, max, cb);
} }
@ -210,7 +218,7 @@ class RedisClient {
* @param {function} cb - callback * @param {function} cb - callback
* @return {undefined} * @return {undefined}
*/ */
ttl(key, cb) { ttl(key: string, cb) {
return this._client.ttl(key, cb); return this._client.ttl(key, cb);
} }
@ -226,5 +234,3 @@ class RedisClient {
return this._client.client('list', cb); return this._client.client('list', cb);
} }
} }
module.exports = RedisClient;
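
For readers following the migration, here is a minimal usage sketch of the wrapper's callback-style API shown above. Only the method names and parameter types come from the class itself; the import path, key name and console logging are illustrative assumptions.

```
import RedisClient from './RedisClient'; // import path is an assumption

// Store a scored member, then read members back in score order.
function recordSample(client: RedisClient, key: string, score: number, value: string) {
    client.zadd(key, score, value, (err: Error | null) => {
        if (err) {
            return console.error('zadd failed', err);
        }
        // zrangebyscore accepts numeric bounds or the '-inf'/'+inf' strings
        return client.zrangebyscore(key, '-inf', '+inf', (err2: Error | null, members: string[]) => {
            if (err2) {
                return console.error('zrangebyscore failed', err2);
            }
            return console.log('members ordered by score', members);
        });
    });
}
```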


@ -1,13 +1,18 @@
-const async = require('async');
-class StatsClient {
+import async from 'async';
+import RedisClient from './RedisClient';
+export default class StatsClient {
+    _redis?: RedisClient;
+    _interval: number;
+    _expiry: number;
/** /**
* @constructor * @constructor
* @param {object} redisClient - RedisClient instance * @param {object} redisClient - RedisClient instance
* @param {number} interval - sampling interval by seconds * @param {number} interval - sampling interval by seconds
* @param {number} expiry - sampling duration by seconds * @param {number} expiry - sampling duration by seconds
*/ */
constructor(redisClient, interval, expiry) { constructor(redisClient: RedisClient, interval: number, expiry: number) {
this._redis = redisClient; this._redis = redisClient;
this._interval = interval; this._interval = interval;
this._expiry = expiry; this._expiry = expiry;
@ -24,9 +29,9 @@ class StatsClient {
* @param {object} d - Date instance * @param {object} d - Date instance
* @return {number} timestamp - normalized to the nearest interval * @return {number} timestamp - normalized to the nearest interval
*/ */
_normalizeTimestamp(d) { _normalizeTimestamp(d: Date): number {
const s = d.getSeconds(); const s = d.getSeconds();
return d.setSeconds(s - s % this._interval, 0); return d.setSeconds(s - (s % this._interval), 0);
} }
/** /**
@ -34,7 +39,7 @@ class StatsClient {
* @param {object} d - Date instance * @param {object} d - Date instance
* @return {number} timestamp - set to the previous interval * @return {number} timestamp - set to the previous interval
*/ */
_setPrevInterval(d) { _setPrevInterval(d: Date): number {
return d.setSeconds(d.getSeconds() - this._interval); return d.setSeconds(d.getSeconds() - this._interval);
} }
@ -44,7 +49,7 @@ class StatsClient {
* @param {Date} date - Date instance * @param {Date} date - Date instance
* @return {string} key - key for redis * @return {string} key - key for redis
*/ */
buildKey(name, date) { buildKey(name: string, date: Date): string {
return `${name}:${this._normalizeTimestamp(date)}`; return `${name}:${this._normalizeTimestamp(date)}`;
} }
@ -54,7 +59,7 @@ class StatsClient {
* @param {array} arr - Date instance * @param {array} arr - Date instance
* @return {string} key - key for redis * @return {string} key - key for redis
*/ */
_getCount(arr) { _getCount(arr: any[]): string {
return arr.reduce((prev, a) => { return arr.reduce((prev, a) => {
let num = parseInt(a[1], 10); let num = parseInt(a[1], 10);
num = Number.isNaN(num) ? 0 : num; num = Number.isNaN(num) ? 0 : num;
@ -69,7 +74,7 @@ class StatsClient {
* @param {function} cb - callback * @param {function} cb - callback
* @return {undefined} * @return {undefined}
*/ */
reportNewRequest(id, incr, cb) { reportNewRequest(id: string, incr: number, cb) {
if (!this._redis) { if (!this._redis) {
return undefined; return undefined;
} }
@ -81,8 +86,8 @@ class StatsClient {
callback = incr; callback = incr;
amount = 1; amount = 1;
} else { } else {
callback = (cb && typeof cb === 'function') ? cb : this._noop; callback = cb && typeof cb === 'function' ? cb : this._noop;
amount = (typeof incr === 'number') ? incr : 1; amount = typeof incr === 'number' ? incr : 1;
} }
const key = this.buildKey(`${id}:requests`, new Date()); const key = this.buildKey(`${id}:requests`, new Date());
@ -97,7 +102,7 @@ class StatsClient {
* @param {function} [cb] - callback * @param {function} [cb] - callback
* @return {undefined} * @return {undefined}
*/ */
incrementKey(key, incr, cb) { incrementKey(key: string, incr: number, cb) {
const callback = cb || this._noop; const callback = cb || this._noop;
return this._redis.incrby(key, incr, callback); return this._redis.incrby(key, incr, callback);
} }
@ -109,18 +114,18 @@ class StatsClient {
* @param {function} [cb] - callback * @param {function} [cb] - callback
* @return {undefined} * @return {undefined}
*/ */
decrementKey(key, decr, cb) { decrementKey(key: string, decr: number, cb) {
const callback = cb || this._noop; const callback = cb || this._noop;
return this._redis.decrby(key, decr, callback); return this._redis.decrby(key, decr, callback);
} }
/** /**
* report/record a request that ended up being a 500 on the server * report/record a request that ended up being a 500 on the server
* @param {string} id - service identifier * @param {string} id - service identifier
* @param {callback} cb - callback * @param {callback} cb - callback
* @return {undefined} * @return {undefined}
*/ */
report500(id, cb) { report500(id: string, cb) {
if (!this._redis) { if (!this._redis) {
return undefined; return undefined;
} }
@ -136,41 +141,46 @@ class StatsClient {
* @param {callback} cb - callback to call with the err/result * @param {callback} cb - callback to call with the err/result
* @return {undefined} * @return {undefined}
*/ */
getAllStats(log, ids, cb) { getAllStats(log, ids: any[], cb) {
if (!this._redis) { if (!this._redis) {
return cb(null, {}); return cb(null, {});
} }
const statsRes = { const statsRes = {
'requests': 0, requests: 0,
'500s': 0, '500s': 0,
'sampleDuration': this._expiry, sampleDuration: this._expiry,
}; };
let requests = 0; let requests = 0;
let errors = 0; let errors = 0;
// for now set concurrency to default of 10 // for now set concurrency to default of 10
-        return async.eachLimit(ids, 10, (id, done) => {
-            this.getStats(log, id, (err, res) => {
-                if (err) {
-                    return done(err);
-                }
-                requests += res.requests;
-                errors += res['500s'];
-                return done();
-            });
-        }, error => {
-            if (error) {
-                log.error('error getting stats', {
-                    error,
-                    method: 'StatsClient.getAllStats',
-                });
-                return cb(null, statsRes);
-            }
-            statsRes.requests = requests;
-            statsRes['500s'] = errors;
-            return cb(null, statsRes);
-        });
-    }
+        return async.eachLimit(
+            ids,
+            10,
+            (id, done) => {
+                this.getStats(log, id, (err, res) => {
+                    if (err) {
+                        return done(err);
+                    }
+                    requests += res.requests;
+                    errors += res['500s'];
+                    return done();
+                });
+            },
+            (error) => {
+                if (error) {
+                    log.error('error getting stats', {
+                        error,
+                        method: 'StatsClient.getAllStats',
+                    });
+                    return cb(null, statsRes);
+                }
+                statsRes.requests = requests;
+                statsRes['500s'] = errors;
+                return cb(null, statsRes);
+            }
+        );
+    }
/** /**
@ -180,7 +190,7 @@ class StatsClient {
* @param {callback} cb - callback to call with the err/result * @param {callback} cb - callback to call with the err/result
* @return {undefined} * @return {undefined}
*/ */
getStats(log, id, cb) { getStats(log, id: string, cb) {
if (!this._redis) { if (!this._redis) {
return cb(null, {}); return cb(null, {});
} }
@ -205,9 +215,9 @@ class StatsClient {
* index 1 contains the result * index 1 contains the result
*/ */
const statsRes = { const statsRes = {
'requests': 0, requests: 0,
'500s': 0, '500s': 0,
'sampleDuration': this._expiry, sampleDuration: this._expiry,
}; };
if (err) { if (err) {
log.error('error getting stats', { log.error('error getting stats', {
@ -215,10 +225,10 @@ class StatsClient {
method: 'StatsClient.getStats', method: 'StatsClient.getStats',
}); });
/** /**
* Redis for stats is not a critial component, ignoring * Redis for stats is not a critial component, ignoring
* any error here as returning an InternalError * any error here as returning an InternalError
* would be confused with the health of the service * would be confused with the health of the service
*/ */
return cb(null, statsRes); return cb(null, statsRes);
} }
statsRes.requests = this._getCount(results[0]); statsRes.requests = this._getCount(results[0]);
@ -227,5 +237,3 @@ class StatsClient {
}); });
} }
} }
module.exports = StatsClient;
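
A short sketch of how the sampling API above is typically driven. The five-minute interval, one-day expiry and the werelogs-style logger are illustrative assumptions, not values taken from this diff.

```
import RedisClient from './RedisClient';
import StatsClient from './StatsClient';

declare const redisClient: RedisClient; // assumed to be configured elsewhere
declare const log: any;                 // werelogs request logger

// Sample in 300-second buckets, keep samples for 24 hours (illustrative).
const stats = new StatsClient(redisClient, 300, 86400);

// Count one request for the 's3' service in the current interval...
stats.reportNewRequest('s3', 1, (err: Error | null) => {
    if (err) {
        return log.error('could not record request', { error: err });
    }
    // ...then read back the aggregated counters for that service.
    return stats.getStats(log, 's3', (err2: Error | null, res: any) => {
        if (err2) {
            return log.error('could not read stats', { error: err2 });
        }
        return log.info('stats', res); // { requests, '500s', sampleDuration }
    });
});
```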


@ -1,6 +1,5 @@
-const async = require('async');
-const StatsClient = require('./StatsClient');
+import async from 'async';
+import StatsClient from './StatsClient';
/** /**
* @class StatsModel * @class StatsModel
@ -8,40 +7,39 @@ const StatsClient = require('./StatsClient');
* @classdesc Extend and overwrite how timestamps are normalized by minutes * @classdesc Extend and overwrite how timestamps are normalized by minutes
* rather than by seconds * rather than by seconds
*/ */
class StatsModel extends StatsClient { export default class StatsModel extends StatsClient {
/** /**
* Utility method to convert 2d array rows to columns, and vice versa * Utility method to convert 2d array rows to columns, and vice versa
* See also: https://docs.ruby-lang.org/en/2.0.0/Array.html#method-i-zip * See also: https://docs.ruby-lang.org/en/2.0.0/Array.html#method-i-zip
* @param {array} arrays - 2d array of integers * @param {array} arrays - 2d array of integers
* @return {array} converted array * @return {array} converted array
*/ */
_zip(arrays) { _zip(arrays: number[][]) {
if (arrays.length > 0 && arrays.every(a => Array.isArray(a))) { if (arrays.length > 0 && arrays.every((a) => Array.isArray(a))) {
return arrays[0].map((_, i) => arrays.map(a => a[i])); return arrays[0].map((_, i) => arrays.map((a) => a[i]));
} }
return []; return [];
} }
/** /**
* normalize to the nearest interval * normalize to the nearest interval
* @param {object} d - Date instance * @param {object} d - Date instance
* @return {number} timestamp - normalized to the nearest interval * @return {number} timestamp - normalized to the nearest interval
*/ */
_normalizeTimestamp(d) { _normalizeTimestamp(d: Date) {
const m = d.getMinutes(); const m = d.getMinutes();
return d.setMinutes(m - m % (Math.floor(this._interval / 60)), 0, 0); return d.setMinutes(m - (m % Math.floor(this._interval / 60)), 0, 0);
} }
/** /**
* override the method to get the count as an array of integers separated * override the method to get the count as an array of integers separated
* by each interval * by each interval
* typical input looks like [[null, '1'], [null, '2'], [null, null]...] * typical input looks like [[null, '1'], [null, '2'], [null, null]...]
* @param {array} arr - each index contains the result of each batch command * @param {array} arr - each index contains the result of each batch command
* where index 0 signifies the error and index 1 contains the result * where index 0 signifies the error and index 1 contains the result
* @return {array} array of integers, ordered from most recent interval to * @return {array} array of integers, ordered from most recent interval to
* oldest interval with length of (expiry / interval) * oldest interval with length of (expiry / interval)
*/ */
_getCount(arr) { _getCount(arr) {
const size = Math.floor(this._expiry / this._interval); const size = Math.floor(this._expiry / this._interval);
const array = arr.reduce((store, i) => { const array = arr.reduce((store, i) => {
@ -58,23 +56,23 @@ class StatsModel extends StatsClient {
} }
/** /**
* wrapper on `getStats` that handles a list of keys * wrapper on `getStats` that handles a list of keys
* override the method to reduce the returned 2d array from `_getCount` * override the method to reduce the returned 2d array from `_getCount`
* @param {object} log - Werelogs request logger * @param {object} log - Werelogs request logger
* @param {array} ids - service identifiers * @param {array} ids - service identifiers
* @param {callback} cb - callback to call with the err/result * @param {callback} cb - callback to call with the err/result
* @return {undefined} * @return {undefined}
*/ */
getAllStats(log, ids, cb) { getAllStats(log, ids: string[], cb) {
if (!this._redis) { if (!this._redis) {
return cb(null, {}); return cb(null, {});
} }
const size = Math.floor(this._expiry / this._interval); const size = Math.floor(this._expiry / this._interval);
const statsRes = { const statsRes = {
'requests': Array(size).fill(0), requests: Array(size).fill(0),
'500s': Array(size).fill(0), '500s': Array(size).fill(0),
'sampleDuration': this._expiry, sampleDuration: this._expiry,
}; };
const requests = []; const requests = [];
const errors = []; const errors = [];
@ -118,9 +116,9 @@ class StatsModel extends StatsClient {
* @param {function} cb - Callback * @param {function} cb - Callback
* @return {undefined} * @return {undefined}
*/ */
getAllGlobalStats(ids, log, cb) { getAllGlobalStats(ids: string[], log, cb) {
const reqsKeys = ids.map(key => (['get', key])); const reqsKeys = ids.map((key) => ['get', key]);
return this._redis.batch(reqsKeys, (err, res) => { return this._redis!.batch(reqsKeys, (err, res) => {
const statsRes = { requests: 0 }; const statsRes = { requests: 0 };
if (err) { if (err) {
log.error('error getting metrics', { log.error('error getting metrics', {
@ -149,7 +147,7 @@ class StatsModel extends StatsClient {
* @param {Date} d - Date instance * @param {Date} d - Date instance
* @return {number} timestamp - normalized to the nearest hour * @return {number} timestamp - normalized to the nearest hour
*/ */
normalizeTimestampByHour(d) { normalizeTimestampByHour(d: Date) {
return d.setMinutes(0, 0, 0); return d.setMinutes(0, 0, 0);
} }
@ -158,7 +156,7 @@ class StatsModel extends StatsClient {
* @param {Date} d - Date instance * @param {Date} d - Date instance
* @return {number} timestamp - one hour prior to date passed * @return {number} timestamp - one hour prior to date passed
*/ */
_getDatePreviousHour(d) { _getDatePreviousHour(d: Date) {
return d.setHours(d.getHours() - 1); return d.setHours(d.getHours() - 1);
} }
@ -167,8 +165,8 @@ class StatsModel extends StatsClient {
* @param {number} epoch - epoch time * @param {number} epoch - epoch time
* @return {array} array of sorted set key timestamps * @return {array} array of sorted set key timestamps
*/ */
getSortedSetHours(epoch) { getSortedSetHours(epoch: number) {
const timestamps = []; const timestamps: number[] = [];
let date = this.normalizeTimestampByHour(new Date(epoch)); let date = this.normalizeTimestampByHour(new Date(epoch));
while (timestamps.length < 24) { while (timestamps.length < 24) {
timestamps.push(date); timestamps.push(date);
@ -182,7 +180,7 @@ class StatsModel extends StatsClient {
* @param {number} epoch - epoch time * @param {number} epoch - epoch time
* @return {string} normalized hour timestamp for given time * @return {string} normalized hour timestamp for given time
*/ */
getSortedSetCurrentHour(epoch) { getSortedSetCurrentHour(epoch: number) {
return this.normalizeTimestampByHour(new Date(epoch)); return this.normalizeTimestampByHour(new Date(epoch));
} }
@ -194,8 +192,8 @@ class StatsModel extends StatsClient {
* @param {callback} cb - callback * @param {callback} cb - callback
* @return {undefined} * @return {undefined}
*/ */
addToSortedSet(key, score, value, cb) { addToSortedSet(key: string, score: number, value: string, cb) {
this._redis.exists(key, (err, resCode) => { this._redis!.exists(key, (err, resCode) => {
if (err) { if (err) {
return cb(err); return cb(err);
} }
@ -204,8 +202,7 @@ class StatsModel extends StatsClient {
const msInADay = 24 * 60 * 60 * 1000; const msInADay = 24 * 60 * 60 * 1000;
const nearestHour = this.normalizeTimestampByHour(new Date()); const nearestHour = this.normalizeTimestampByHour(new Date());
// in seconds // in seconds
-            const ttl = Math.ceil(
-                (msInADay - (Date.now() - nearestHour)) / 1000);
+            const ttl = Math.ceil((msInADay - (Date.now() - nearestHour)) / 1000);
const cmds = [ const cmds = [
['zadd', key, score, value], ['zadd', key, score, value],
['expire', key, ttl], ['expire', key, ttl],
@ -214,7 +211,7 @@ class StatsModel extends StatsClient {
if (err) { if (err) {
return cb(err); return cb(err);
} }
const cmdErr = res.find(r => r[0] !== null); const cmdErr = res.find((r) => r[0] !== null);
if (cmdErr) { if (cmdErr) {
return cb(cmdErr); return cb(cmdErr);
} }
@ -222,9 +219,7 @@ class StatsModel extends StatsClient {
return cb(null, successResponse); return cb(null, successResponse);
}); });
} }
return this._redis.zadd(key, score, value, cb); return this._redis!.zadd(key, score, value, cb);
}); });
} }
} }
module.exports = StatsModel;
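
The hour-based helpers above can be exercised as follows; the member value and key prefix are invented for the example.

```
import StatsModel from './StatsModel';

declare const model: StatsModel; // assumed constructed like StatsClient

// 24 hourly timestamps, starting from the current hour and going back.
const hours: number[] = model.getSortedSetHours(Date.now());

// Track an event in a sorted set keyed by the current hour; on first
// insertion the class also attaches an expiry of roughly one day.
const hourKey = `requests:${model.getSortedSetCurrentHour(Date.now())}`;
model.addToSortedSet(hourKey, Date.now(), 'request-id-1234', (err: Error | null, res: any) => {
    if (err) {
        return console.error('addToSortedSet failed', err);
    }
    return console.log('recorded', res, 'tracking', hours.length, 'hours');
});
```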


@ -1,13 +1,13 @@
const promClient = require('prom-client'); import promClient from 'prom-client';
const collectDefaultMetricsIntervalMs = const collectDefaultMetricsIntervalMs =
process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS !== undefined ? process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS !== undefined
Number.parseInt(process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS, 10) : ? Number.parseInt(process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS, 10)
10000; : 10000;
promClient.collectDefaultMetrics({ timeout: collectDefaultMetricsIntervalMs }); promClient.collectDefaultMetrics({ timeout: collectDefaultMetricsIntervalMs });
class ZenkoMetrics { export default class ZenkoMetrics {
static createCounter(params) { static createCounter(params) {
return new promClient.Counter(params); return new promClient.Counter(params);
} }
@ -36,5 +36,3 @@ class ZenkoMetrics {
return promClient.register.contentType; return promClient.register.contentType;
} }
} }
module.exports = ZenkoMetrics;
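
A sketch of how the static helper above forwards to prom-client; the metric name, help text and labels are made up for the example.

```
import ZenkoMetrics from './ZenkoMetrics';

// createCounter() passes its params object straight to new promClient.Counter().
const httpRequests = ZenkoMetrics.createCounter({
    name: 'http_requests_total',                // illustrative metric name
    help: 'Total number of HTTP requests seen',
    labelNames: ['method', 'code'],
});

httpRequests.inc({ method: 'GET', code: '200' });
```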


@ -1,11 +1,17 @@
const errors = require('../errors'); import errors from '../errors';
const validServices = { const validServices = {
aws: ['s3', 'iam', 'sts', 'ring'], aws: ['s3', 'iam', 'sts', 'ring'],
scality: ['utapi', 'sso'], scality: ['utapi', 'sso'],
}; };
class ARN { export default class ARN {
_partition: string;
_service: string;
_region: string | null;
_accountId: string | null;
_resource: string;
/** /**
* *
* Create an ARN object from its individual components * Create an ARN object from its individual components
@ -17,7 +23,7 @@ class ARN {
* @param {string} [accountId] - AWS 12-digit account ID * @param {string} [accountId] - AWS 12-digit account ID
* @param {string} resource - AWS resource path (e.g. 'foo/bar') * @param {string} resource - AWS resource path (e.g. 'foo/bar')
*/ */
constructor(partition, service, region, accountId, resource) { constructor(partition: string, service: string, region: string, accountId: string, resource: string) {
this._partition = partition; this._partition = partition;
this._service = service; this._service = service;
this._region = region || null; this._region = region || null;
@ -25,9 +31,9 @@ class ARN {
this._resource = resource; this._resource = resource;
} }
static createFromString(arnStr) { static createFromString(arnStr: string) {
const [arn, partition, service, region, accountId, const [arn, partition, service, region, accountId,
resourceType, resource] = arnStr.split(':'); resourceType, resource] = arnStr.split(':');
if (arn !== 'arn') { if (arn !== 'arn') {
return { error: errors.InvalidArgument.customizeDescription( return { error: errors.InvalidArgument.customizeDescription(
@ -57,8 +63,8 @@ class ARN {
`bad ARN: bad account ID "${accountId}": ` + `bad ARN: bad account ID "${accountId}": ` +
'must be a 12-digit number or "*"') }; 'must be a 12-digit number or "*"') };
} }
const fullResource = (resource !== undefined ? const fullResource = resource !== undefined ?
`${resourceType}:${resource}` : resourceType); `${resourceType}:${resource}` : resourceType;
return new ARN(partition, service, region, accountId, fullResource); return new ARN(partition, service, region, accountId, fullResource);
} }
@ -79,28 +85,26 @@ class ARN {
} }
isIAMAccount() { isIAMAccount() {
return this.getService() === 'iam' return this.getService() === 'iam' &&
&& this.getAccountId() !== null this.getAccountId() !== null &&
&& this.getAccountId() !== '*' this.getAccountId() !== '*' &&
&& this.getResource() === 'root'; this.getResource() === 'root';
} }
isIAMUser() { isIAMUser() {
return this.getService() === 'iam' return this.getService() === 'iam' &&
&& this.getAccountId() !== null this.getAccountId() !== null &&
&& this.getAccountId() !== '*' this.getAccountId() !== '*' &&
&& this.getResource().startsWith('user/'); this.getResource().startsWith('user/');
} }
isIAMRole() { isIAMRole() {
return this.getService() === 'iam' return this.getService() === 'iam' &&
&& this.getAccountId() !== null this.getAccountId() !== null &&
&& this.getResource().startsWith('role'); this.getResource().startsWith('role');
} }
toString() { toString() {
return ['arn', this.getPartition(), this.getService(), return ['arn', this.getPartition(), this.getService(),
this.getRegion(), this.getAccountId(), this.getResource()] this.getRegion(), this.getAccountId(), this.getResource()]
.join(':'); .join(':');
} }
} }
module.exports = ARN;
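
Parsing and classifying an ARN with the class above; the ARN string is only an example. createFromString returns either an ARN instance or an object carrying an error, so the sketch narrows on instanceof first.

```
import ARN from './ARN';

const parsed = ARN.createFromString('arn:aws:iam::123456789012:user/alice');

if (parsed instanceof ARN) {
    // 'iam' service, 12-digit account id, resource starting with 'user/'
    console.log(parsed.isIAMUser()); // true
    console.log(parsed.toString());  // 'arn:aws:iam::123456789012:user/alice'
} else {
    console.error('invalid ARN', parsed.error);
}
```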


@ -1,7 +1,13 @@
-const { legacyLocations } = require('../constants');
-const escapeForXml = require('../s3middleware/escapeForXml');
-class BackendInfo {
+import { legacyLocations } from '../constants';
+import escapeForXml from '../s3middleware/escapeForXml';
+export default class BackendInfo {
+    _config;
+    _objectLocationConstraint;
+    _bucketLocationConstraint;
+    _requestEndpoint;
+    _legacyLocationConstraint;
/** /**
* Represents the info necessary to evaluate which data backend to use * Represents the info necessary to evaluate which data backend to use
* on a data put call. * on a data put call.
@ -52,9 +58,9 @@ class BackendInfo {
*/ */
static isRequestEndpointPresent(config, requestEndpoint, log) { static isRequestEndpointPresent(config, requestEndpoint, log) {
if (Object.keys(config.restEndpoints). if (Object.keys(config.restEndpoints).
indexOf(requestEndpoint) < 0) { indexOf(requestEndpoint) < 0) {
log.trace('requestEndpoint does not match config restEndpoints', log.trace('requestEndpoint does not match config restEndpoints',
{ requestEndpoint }); { requestEndpoint });
return false; return false;
} }
return true; return true;
@ -70,10 +76,10 @@ class BackendInfo {
*/ */
static isRequestEndpointValueValid(config, requestEndpoint, log) { static isRequestEndpointValueValid(config, requestEndpoint, log) {
if (Object.keys(config.locationConstraints). if (Object.keys(config.locationConstraints).
indexOf(config.restEndpoints[requestEndpoint]) < 0) { indexOf(config.restEndpoints[requestEndpoint]) < 0) {
log.trace('the default locationConstraint for request' + log.trace('the default locationConstraint for request' +
'Endpoint does not match any config locationConstraint', 'Endpoint does not match any config locationConstraint',
{ requestEndpoint }); { requestEndpoint });
return false; return false;
} }
return true; return true;
@ -110,7 +116,7 @@ class BackendInfo {
*/ */
static isValidRequestEndpointOrBackend(config, requestEndpoint, log) { static isValidRequestEndpointOrBackend(config, requestEndpoint, log) {
if (!BackendInfo.isRequestEndpointPresent(config, requestEndpoint, if (!BackendInfo.isRequestEndpointPresent(config, requestEndpoint,
log)) { log)) {
return BackendInfo.isMemOrFileBackend(config, log); return BackendInfo.isMemOrFileBackend(config, log);
} }
return BackendInfo.isRequestEndpointValueValid(config, requestEndpoint, return BackendInfo.isRequestEndpointValueValid(config, requestEndpoint,
@ -132,7 +138,7 @@ class BackendInfo {
bucketLocationConstraint, requestEndpoint, log) { bucketLocationConstraint, requestEndpoint, log) {
if (objectLocationConstraint) { if (objectLocationConstraint) {
if (BackendInfo.isValidLocationConstraint(config, if (BackendInfo.isValidLocationConstraint(config,
objectLocationConstraint, log)) { objectLocationConstraint, log)) {
log.trace('objectLocationConstraint is valid'); log.trace('objectLocationConstraint is valid');
return { isValid: true }; return { isValid: true };
} }
@ -143,7 +149,7 @@ class BackendInfo {
} }
if (bucketLocationConstraint) { if (bucketLocationConstraint) {
if (BackendInfo.isValidLocationConstraint(config, if (BackendInfo.isValidLocationConstraint(config,
bucketLocationConstraint, log)) { bucketLocationConstraint, log)) {
log.trace('bucketLocationConstraint is valid'); log.trace('bucketLocationConstraint is valid');
return { isValid: true }; return { isValid: true };
} }
@ -159,7 +165,7 @@ class BackendInfo {
return { isValid: true, legacyLocationConstraint }; return { isValid: true, legacyLocationConstraint };
} }
if (!BackendInfo.isValidRequestEndpointOrBackend(config, if (!BackendInfo.isValidRequestEndpointOrBackend(config,
requestEndpoint, log)) { requestEndpoint, log)) {
return { isValid: false, description: 'Endpoint Location Error - ' + return { isValid: false, description: 'Endpoint Location Error - ' +
`Your endpoint "${requestEndpoint}" is not in restEndpoints ` + `Your endpoint "${requestEndpoint}" is not in restEndpoints ` +
'in your config OR the default location constraint for request ' + 'in your config OR the default location constraint for request ' +
@ -167,7 +173,7 @@ class BackendInfo {
'match any config locationConstraint - Please update.' }; 'match any config locationConstraint - Please update.' };
} }
if (BackendInfo.isRequestEndpointPresent(config, requestEndpoint, if (BackendInfo.isRequestEndpointPresent(config, requestEndpoint,
log)) { log)) {
return { isValid: true }; return { isValid: true };
} }
return { isValid: true, defaultedToDataBackend: true }; return { isValid: true, defaultedToDataBackend: true };
@ -236,5 +242,3 @@ class BackendInfo {
return this._config.backends.data; return this._config.backends.data;
} }
} }
module.exports = BackendInfo;
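
The endpoint checks above walk a CloudServer-style config object. The config literal below is a simplified assumption containing only the fields those static checks read, and the logger is assumed to be a werelogs request logger.

```
import BackendInfo from './BackendInfo';

declare const log: any; // werelogs request logger

const config = {
    restEndpoints: { 's3.example.com': 'us-east-1' },
    locationConstraints: { 'us-east-1': { type: 'file' } },
    backends: { data: 'file' },
};

// true: the endpoint is listed in restEndpoints...
BackendInfo.isRequestEndpointPresent(config, 's3.example.com', log);
// ...and its default location maps to a known locationConstraint.
BackendInfo.isRequestEndpointValueValid(config, 's3.example.com', log);
```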


@ -2,7 +2,9 @@
* Helper class to ease access to the Azure specific information for * Helper class to ease access to the Azure specific information for
* storage accounts mapped to buckets. * storage accounts mapped to buckets.
*/ */
class BucketAzureInfo { export default class BucketAzureInfo {
_data
/** /**
* @constructor * @constructor
* @param {object} obj - Raw structure for the Azure info on storage account * @param {object} obj - Raw structure for the Azure info on storage account
@ -233,5 +235,3 @@ class BucketAzureInfo {
return this._data; return this._data;
} }
} }
module.exports = BucketAzureInfo;


@ -1,19 +1,44 @@
const assert = require('assert'); import assert from 'assert';
const uuid = require('uuid/v4'); import uuid from 'uuid/v4';
const { WebsiteConfiguration } = require('./WebsiteConfiguration'); import { WebsiteConfiguration } from './WebsiteConfiguration';
const ReplicationConfiguration = require('./ReplicationConfiguration'); import ReplicationConfiguration from './ReplicationConfiguration';
const LifecycleConfiguration = require('./LifecycleConfiguration'); import LifecycleConfiguration from './LifecycleConfiguration';
const ObjectLockConfiguration = require('./ObjectLockConfiguration'); import ObjectLockConfiguration from './ObjectLockConfiguration';
const BucketPolicy = require('./BucketPolicy'); import BucketPolicy from './BucketPolicy';
const NotificationConfiguration = require('./NotificationConfiguration'); import NotificationConfiguration from './NotificationConfiguration';
// WHEN UPDATING THIS NUMBER, UPDATE BucketInfoModelVersion.md CHANGELOG // WHEN UPDATING THIS NUMBER, UPDATE BucketInfoModelVersion.md CHANGELOG
// BucketInfoModelVersion.md can be found in documentation/ at the root // BucketInfoModelVersion.md can be found in documentation/ at the root
// of this repository // of this repository
const modelVersion = 14; const modelVersion = 14;
class BucketInfo { export default class BucketInfo {
_acl;
_name;
_owner;
_ownerDisplayName;
_creationDate;
_mdBucketModelVersion;
_transient;
_deleted;
_serverSideEncryption;
_versioningConfiguration;
_locationConstraint;
_readLocationConstraint;
_websiteConfiguration;
_replicationConfiguration;
_cors;
_lifecycleConfiguration;
_bucketPolicy;
_uid;
_isNFS;
_ingestion;
_azureInfo;
_objectLockEnabled;
_objectLockConfiguration;
_notificationConfiguration;
/** /**
* Represents all bucket information. * Represents all bucket information.
* @constructor * @constructor
@ -69,13 +94,13 @@ class BucketInfo {
* @param {object} [notificationConfiguration] - bucket notification configuration * @param {object} [notificationConfiguration] - bucket notification configuration
*/ */
constructor(name, owner, ownerDisplayName, creationDate, constructor(name, owner, ownerDisplayName, creationDate,
mdBucketModelVersion, acl, transient, deleted, mdBucketModelVersion, acl, transient, deleted,
serverSideEncryption, versioningConfiguration, serverSideEncryption, versioningConfiguration,
locationConstraint, websiteConfiguration, cors, locationConstraint, websiteConfiguration, cors,
replicationConfiguration, lifecycleConfiguration, replicationConfiguration, lifecycleConfiguration,
bucketPolicy, uid, readLocationConstraint, isNFS, bucketPolicy, uid, readLocationConstraint, isNFS,
ingestionConfig, azureInfo, objectLockEnabled, ingestionConfig, azureInfo, objectLockEnabled,
objectLockConfiguration, notificationConfiguration) { objectLockConfiguration, notificationConfiguration) {
assert.strictEqual(typeof name, 'string'); assert.strictEqual(typeof name, 'string');
assert.strictEqual(typeof owner, 'string'); assert.strictEqual(typeof owner, 'string');
assert.strictEqual(typeof ownerDisplayName, 'string'); assert.strictEqual(typeof ownerDisplayName, 'string');
@ -94,7 +119,7 @@ class BucketInfo {
if (serverSideEncryption) { if (serverSideEncryption) {
assert.strictEqual(typeof serverSideEncryption, 'object'); assert.strictEqual(typeof serverSideEncryption, 'object');
const { cryptoScheme, algorithm, masterKeyId, const { cryptoScheme, algorithm, masterKeyId,
configuredMasterKeyId, mandatory } = serverSideEncryption; configuredMasterKeyId, mandatory } = serverSideEncryption;
assert.strictEqual(typeof cryptoScheme, 'number'); assert.strictEqual(typeof cryptoScheme, 'number');
assert.strictEqual(typeof algorithm, 'string'); assert.strictEqual(typeof algorithm, 'string');
assert.strictEqual(typeof masterKeyId, 'string'); assert.strictEqual(typeof masterKeyId, 'string');
@ -768,5 +793,3 @@ class BucketInfo {
return this; return this;
} }
} }
module.exports = BucketInfo;


@ -1,7 +1,7 @@
const assert = require('assert'); import assert from 'assert';
const errors = require('../errors'); import errors from '../errors';
const { validateResourcePolicy } = require('../policy/policyValidator'); import { validateResourcePolicy } from '../policy/policyValidator';
/** /**
* Format of json policy: * Format of json policy:
@ -49,7 +49,10 @@ const objectActions = [
's3:PutObjectTagging', 's3:PutObjectTagging',
]; ];
class BucketPolicy { export default class BucketPolicy {
_json
_policy
/** /**
* Create a Bucket Policy instance * Create a Bucket Policy instance
* @param {string} json - the json policy * @param {string} json - the json policy
@ -75,8 +78,11 @@ class BucketPolicy {
*/ */
_getPolicy() { _getPolicy() {
if (!this._json || this._json === '') { if (!this._json || this._json === '') {
-            return { error: errors.MalformedPolicy.customizeDescription(
-                'request json is empty or undefined') };
+            return {
+                error: errors.MalformedPolicy.customizeDescription(
+                    'request json is empty or undefined'
+                ),
+            };
} }
const validSchema = validateResourcePolicy(this._json); const validSchema = validateResourcePolicy(this._json);
if (validSchema.error) { if (validSchema.error) {
@ -104,25 +110,32 @@ class BucketPolicy {
* @return {error} - contains error or empty obj * @return {error} - contains error or empty obj
*/ */
_validateActionResource() { _validateActionResource() {
-        const invalid = this._policy.Statement.every(s => {
-            const actions = typeof s.Action === 'string' ?
-                [s.Action] : s.Action;
-            const resources = typeof s.Resource === 'string' ?
-                [s.Resource] : s.Resource;
-            const objectAction = actions.some(a =>
-                a.includes('Object') || objectActions.includes(a));
-            // wildcardObjectAction checks for actions such as 's3:*' or
-            // 's3:Put*' but will return false for actions such as
-            // 's3:PutBucket*'
-            const wildcardObjectAction = actions.some(
-                a => a.includes('*') && !a.includes('Bucket'));
-            const objectResource = resources.some(r => r.includes('/'));
-            return ((objectAction && !objectResource) ||
-                (objectResource && !objectAction && !wildcardObjectAction));
-        });
-        if (invalid) {
-            return { error: errors.MalformedPolicy.customizeDescription(
-                'Action does not apply to any resource(s) in statement') };
+        const invalid = this._policy.Statement.every((s) => {
+            const actions =
+                typeof s.Action === 'string' ? [s.Action] : s.Action;
+            const resources =
+                typeof s.Resource === 'string' ? [s.Resource] : s.Resource;
+            const objectAction = actions.some(
+                (a) => a.includes('Object') || objectActions.includes(a)
+            );
+            // wildcardObjectAction checks for actions such as 's3:*' or
+            // 's3:Put*' but will return false for actions such as
+            // 's3:PutBucket*'
+            const wildcardObjectAction = actions.some(
+                (a) => a.includes('*') && !a.includes('Bucket')
+            );
+            const objectResource = resources.some((r) => r.includes('/'));
+            return (
+                (objectAction && !objectResource) ||
+                (objectResource && !objectAction && !wildcardObjectAction)
+            );
+        });
+        if (invalid) {
+            return {
+                error: errors.MalformedPolicy.customizeDescription(
+                    'Action does not apply to any resource(s) in statement'
+                ),
+            };
} }
return {}; return {};
} }
@ -139,5 +152,3 @@ class BucketPolicy {
assert.deepStrictEqual(validated, { error: null, valid: true }); assert.deepStrictEqual(validated, { error: null, valid: true });
} }
} }
module.exports = BucketPolicy;
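
To make the Action/Resource consistency check above concrete, here are two statements it distinguishes; the bucket name and policy contents are invented for the illustration.

```
import BucketPolicy from './BucketPolicy';

// Object-level action on an object-level resource: passes the check,
// because the Resource contains a '/'.
const validJson = JSON.stringify({
    Version: '2012-10-17',
    Statement: [{
        Effect: 'Allow',
        Principal: '*',
        Action: 's3:GetObject',
        Resource: 'arn:aws:s3:::examplebucket/*',
    }],
});

// Object-level action on a bucket-level resource: flagged with
// "Action does not apply to any resource(s) in statement".
const invalidJson = JSON.stringify({
    Version: '2012-10-17',
    Statement: [{
        Effect: 'Allow',
        Principal: '*',
        Action: 's3:GetObject',
        Resource: 'arn:aws:s3:::examplebucket',
    }],
});

const policy = new BucketPolicy(validJson); // the invalid variant would be rejected
```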


@ -1,9 +1,9 @@
const assert = require('assert'); import assert from 'assert';
const UUID = require('uuid'); import UUID from 'uuid';
const errors = require('../errors'); import errors from '../errors';
const LifecycleRule = require('./LifecycleRule'); import LifecycleRule from './LifecycleRule';
const escapeForXml = require('../s3middleware/escapeForXml'); import escapeForXml from '../s3middleware/escapeForXml';
const MAX_DAYS = 2147483647; // Max 32-bit signed binary integer. const MAX_DAYS = 2147483647; // Max 32-bit signed binary integer.
@ -83,7 +83,7 @@ const MAX_DAYS = 2147483647; // Max 32-bit signed binary integer.
}; };
*/ */
class LifecycleConfiguration { export default class LifecycleConfiguration {
/** /**
* Create a Lifecycle Configuration instance * Create a Lifecycle Configuration instance
* @param {string} xml - the parsed xml * @param {string} xml - the parsed xml
@ -381,7 +381,7 @@ class LifecycleConfiguration {
if (!tags[i].Key || !tags[i].Value) { if (!tags[i].Key || !tags[i].Value) {
tagObj.error = tagObj.error =
errors.MissingRequiredParameter.customizeDescription( errors.MissingRequiredParameter.customizeDescription(
'Tag XML does not contain both Key and Value'); 'Tag XML does not contain both Key and Value');
break; break;
} }
@ -929,7 +929,7 @@ class LifecycleConfiguration {
const daysInt = parseInt(subExp.Days[0], 10); const daysInt = parseInt(subExp.Days[0], 10);
if (daysInt < 1) { if (daysInt < 1) {
expObj.error = errors.InvalidArgument.customizeDescription( expObj.error = errors.InvalidArgument.customizeDescription(
'Expiration days is not a positive integer'); 'Expiration days is not a positive integer');
} else { } else {
expObj.days = daysInt; expObj.days = daysInt;
} }
@ -1125,10 +1125,10 @@ class LifecycleConfiguration {
const { noncurrentDays, storageClass } = transition; const { noncurrentDays, storageClass } = transition;
xml.push( xml.push(
`<${actionName}>`, `<${actionName}>`,
`<NoncurrentDays>${noncurrentDays}` + `<NoncurrentDays>${noncurrentDays}` +
'</NoncurrentDays>', '</NoncurrentDays>',
`<StorageClass>${storageClass}</StorageClass>`, `<StorageClass>${storageClass}</StorageClass>`,
`</${actionName}>` `</${actionName}>`,
); );
}); });
Action = xml.join(''); Action = xml.join('');
@ -1146,9 +1146,9 @@ class LifecycleConfiguration {
} }
xml.push( xml.push(
`<${actionName}>`, `<${actionName}>`,
element, element,
`<StorageClass>${storageClass}</StorageClass>`, `<StorageClass>${storageClass}</StorageClass>`,
`</${actionName}>` `</${actionName}>`,
); );
}); });
Action = xml.join(''); Action = xml.join('');
@ -1220,5 +1220,3 @@ class LifecycleConfiguration {
return { Rules: rulesJSON }; return { Rules: rulesJSON };
} }
} }
module.exports = LifecycleConfiguration;


@ -1,11 +1,11 @@
const uuid = require('uuid/v4'); import uuid from 'uuid/v4';
/** /**
* @class LifecycleRule * @class LifecycleRule
* *
* @classdesc Simple get/set class to build a single Rule * @classdesc Simple get/set class to build a single Rule
*/ */
class LifecycleRule { export default class LifecycleRule {
constructor(id, status) { constructor(id, status) {
// defaults // defaults
this.id = id || uuid(); this.id = id || uuid();
@ -134,5 +134,3 @@ class LifecycleRule {
return this; return this;
} }
} }
module.exports = LifecycleRule;


@ -1,11 +1,11 @@
const assert = require('assert'); import assert from 'assert';
const UUID = require('uuid'); import UUID from 'uuid';
const { import {
supportedNotificationEvents, supportedNotificationEvents,
notificationArnPrefix, notificationArnPrefix,
} = require('../constants'); } from '../constants';
const errors = require('../errors'); import errors from '../errors';
/** /**
* Format of xml request: * Format of xml request:
@ -27,7 +27,7 @@ const errors = require('../errors');
* </NotificationConfiguration> * </NotificationConfiguration>
*/ */
/** /**
* Format of config: * Format of config:
* *
* config = { * config = {
@ -51,7 +51,7 @@ const errors = require('../errors');
* } * }
*/ */
class NotificationConfiguration { export default class NotificationConfiguration {
/** /**
* Create a Notification Configuration instance * Create a Notification Configuration instance
* @param {string} xml - parsed configuration xml * @param {string} xml - parsed configuration xml
@ -307,5 +307,3 @@ class NotificationConfiguration {
return; return;
} }
} }
module.exports = NotificationConfiguration;


@ -1,6 +1,5 @@
const assert = require('assert'); import assert from 'assert';
import errors from '../errors';
const errors = require('../errors');
/** /**
* Format of xml request: * Format of xml request:
@ -17,7 +16,7 @@ const errors = require('../errors');
* </ObjectLockConfiguration> * </ObjectLockConfiguration>
*/ */
/** /**
* Format of config: * Format of config:
* *
* config = { * config = {
@ -27,7 +26,7 @@ const errors = require('../errors');
* } * }
* } * }
*/ */
class ObjectLockConfiguration { export default class ObjectLockConfiguration {
/** /**
* Create an Object Lock Configuration instance * Create an Object Lock Configuration instance
* @param {string} xml - the parsed configuration xml * @param {string} xml - the parsed configuration xml
@ -234,5 +233,3 @@ class ObjectLockConfiguration {
'</ObjectLockConfiguration>'; '</ObjectLockConfiguration>';
} }
} }
module.exports = ObjectLockConfiguration;


@ -1,16 +1,15 @@
const crypto = require('crypto'); import * as crypto from 'crypto';
const constants = require('../constants'); import * as constants from '../constants';
const VersionIDUtils = require('../versioning/VersionID'); import * as VersionIDUtils from '../versioning/VersionID';
const ObjectMDLocation = require('./ObjectMDLocation'); import ObjectMDLocation from './ObjectMDLocation';
/** /**
* Class to manage metadata object for regular s3 objects (instead of * Class to manage metadata object for regular s3 objects (instead of
* mpuPart metadata for example) * mpuPart metadata for example)
*/ */
class ObjectMD { export default class ObjectMD {
/** /**
* Create a new instance of ObjectMD. Parameter <tt>objMd</tt> is * Create a new instance of ObjectMD. Parameter <tt>objMd</tt> is
* reserved for internal use, users should call * reserved for internal use, users should call
@ -149,7 +148,7 @@ class ObjectMD {
Object.assign(this._data, objMd._data); Object.assign(this._data, objMd._data);
Object.assign(this._data.replicationInfo, Object.assign(this._data.replicationInfo,
objMd._data.replicationInfo); objMd._data.replicationInfo);
} }
_updateFromParsedJSON(objMd) { _updateFromParsedJSON(objMd) {
@ -1190,5 +1189,3 @@ class ObjectMD {
return this._data; return this._data;
} }
} }
module.exports = ObjectMD;


@ -2,7 +2,7 @@
* Helper class to ease access to the Azure specific information for * Helper class to ease access to the Azure specific information for
* Blob and Container objects. * Blob and Container objects.
*/ */
class ObjectMDAzureInfo { export default class ObjectMDAzureInfo {
/** /**
* @constructor * @constructor
* @param {object} obj - Raw structure for the Azure info on Blob/Container * @param {object} obj - Raw structure for the Azure info on Blob/Container
@ -158,5 +158,3 @@ class ObjectMDAzureInfo {
return this._data; return this._data;
} }
} }
module.exports = ObjectMDAzureInfo;


@ -2,8 +2,7 @@
* Helper class to ease access to a single data location in metadata * Helper class to ease access to a single data location in metadata
* 'location' array * 'location' array
*/ */
class ObjectMDLocation { export default class ObjectMDLocation {
/** /**
* @constructor * @constructor
* @param {object} locationObj - single data location info * @param {object} locationObj - single data location info
@ -127,5 +126,3 @@ class ObjectMDLocation {
return this._data; return this._data;
} }
} }
module.exports = ObjectMDLocation;


@ -1,9 +1,9 @@
const assert = require('assert'); import assert from 'assert';
const UUID = require('uuid'); import UUID from 'uuid';
const escapeForXml = require('../s3middleware/escapeForXml'); import escapeForXml from '../s3middleware/escapeForXml';
const errors = require('../errors'); import errors from '../errors';
const { isValidBucketName } = require('../s3routes/routesUtils'); import { isValidBucketName } from '../s3routes/routesUtils';
const MAX_RULES = 1000; const MAX_RULES = 1000;
const RULE_ID_LIMIT = 255; const RULE_ID_LIMIT = 255;
@ -37,7 +37,19 @@ const validStorageClasses = [
</ReplicationConfiguration> </ReplicationConfiguration>
*/ */
class ReplicationConfiguration { export default class ReplicationConfiguration {
_parsedXML
_log
_config
_configPrefixes
_configIDs
_role
_destination
_rules
_prevStorageClass
_hasScalityDestination
_preferredReadLocation
/** /**
* Create a ReplicationConfiguration instance * Create a ReplicationConfiguration instance
* @param {string} xml - The parsed XML * @param {string} xml - The parsed XML
@ -469,5 +481,3 @@ class ReplicationConfiguration {
}); });
} }
} }
module.exports = ReplicationConfiguration;


@ -1,4 +1,7 @@
class RoutingRule { export class RoutingRule {
_redirect;
_condition;
/** /**
* Represents a routing rule in a website configuration. * Represents a routing rule in a website configuration.
* @constructor * @constructor
@ -52,7 +55,12 @@ class RoutingRule {
} }
} }
class WebsiteConfiguration { export class WebsiteConfiguration {
_indexDocument;
_errorDocument;
_redirectAllRequestsTo;
_routingRules;
/** /**
* Object that represents website configuration * Object that represents website configuration
* @constructor * @constructor
@ -188,8 +196,3 @@ class WebsiteConfiguration {
return this._routingRules; return this._routingRules;
} }
} }
module.exports = {
RoutingRule,
WebsiteConfiguration,
};


@ -17,7 +17,7 @@ function shuffle(array) {
} }
} }
class RoundRobin { export default class RoundRobin {
/** /**
* @constructor * @constructor
* @param {object[]|string[]} hostsList - list of hosts to query * @param {object[]|string[]} hostsList - list of hosts to query
@ -111,7 +111,7 @@ class RoundRobin {
pickHost() { pickHost() {
if (this.logger) { if (this.logger) {
this.logger.debug('pick host', this.logger.debug('pick host',
{ host: this.getCurrentHost() }); { host: this.getCurrentHost() });
} }
const curHost = this.getCurrentHost(); const curHost = this.getCurrentHost();
++this.pickCount; ++this.pickCount;
@ -163,9 +163,7 @@ class RoundRobin {
} }
if (this.logger) { if (this.logger) {
this.logger.debug('round robin host', this.logger.debug('round robin host',
{ newHost: this.getCurrentHost() }); { newHost: this.getCurrentHost() });
} }
} }
} }
module.exports = RoundRobin;
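
A short sketch of cycling through backends with the class above; the host list is invented, and only pickHost and the host-list constructor argument are taken from the code.

```
import RoundRobin from './RoundRobin';

// Hosts may be given as { host, port } objects or as plain strings.
const backends = new RoundRobin([
    { host: '10.0.0.1', port: 8000 },
    { host: '10.0.0.2', port: 8000 },
]);

// pickHost() returns the currently selected host and bumps the internal
// pick counter used to decide when to rotate to the next host.
const target = backends.pickHost();
console.log('sending request to', target);
```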


@ -1,16 +1,13 @@
-'use strict'; // eslint-disable-line
-const http = require('http');
-const https = require('https');
-const assert = require('assert');
-const dhparam = require('../../https/dh2048').dhparam;
-const ciphers = require('../../https/ciphers').ciphers;
-const errors = require('../../errors');
-const { checkSupportIPv6 } = require('./utils');
+import * as http from 'http';
+import * as https from 'https';
+import assert from 'assert';
+import { dhparam } from '../../https/dh2048';
+import { ciphers } from '../../https/ciphers';
+import errors from '../../errors';
+import { checkSupportIPv6 } from './utils';
class Server { export default class Server {
/** /**
* @constructor * @constructor
* *
@ -431,16 +428,16 @@ class Server {
// Setting no delay of the socket to the value configured // Setting no delay of the socket to the value configured
sock.setNoDelay(this.isNoDelay()); sock.setNoDelay(this.isNoDelay());
sock.on('error', err => this._logger.info( sock.on('error', err => this._logger.info(
'socket error - request rejected', { error: err })); 'socket error - request rejected', { error: err }));
}); });
this._server.on('tlsClientError', (err, sock) => this._server.on('tlsClientError', (err, sock) =>
this._onClientError(err, sock)); this._onClientError(err, sock));
this._server.on('clientError', (err, sock) => this._server.on('clientError', (err, sock) =>
this._onClientError(err, sock)); this._onClientError(err, sock));
this._server.on('checkContinue', (req, res) => this._server.on('checkContinue', (req, res) =>
this._onCheckContinue(req, res)); this._onCheckContinue(req, res));
this._server.on('checkExpectation', (req, res) => this._server.on('checkExpectation', (req, res) =>
this._onCheckExpectation(req, res)); this._onCheckExpectation(req, res));
this._server.on('listening', () => this._onListening()); this._server.on('listening', () => this._onListening());
} }
this._server.listen(this._port, this._address); this._server.listen(this._port, this._address);
@ -459,5 +456,3 @@ class Server {
return this; return this;
} }
} }
module.exports = Server;


@ -1,7 +1,5 @@
-'use strict'; // eslint-disable-line
-const os = require('os');
-const errors = require('../../errors');
+import * as os from 'os';
+import errors from '../../errors';
/** /**
* Parse the Range header into an object * Parse the Range header into an object
@ -16,7 +14,7 @@ const errors = require('../../errors');
* - an error attribute of type errors.InvalidArgument if the range * - an error attribute of type errors.InvalidArgument if the range
* syntax is invalid * syntax is invalid
*/ */
function parseRangeSpec(rangeHeader) { export function parseRangeSpec(rangeHeader) {
const rangeMatch = /^bytes=([0-9]+)?-([0-9]+)?$/.exec(rangeHeader); const rangeMatch = /^bytes=([0-9]+)?-([0-9]+)?$/.exec(rangeHeader);
if (rangeMatch) { if (rangeMatch) {
const rangeValues = rangeMatch.slice(1, 3); const rangeValues = rangeMatch.slice(1, 3);
@ -55,7 +53,7 @@ function parseRangeSpec(rangeHeader) {
* - or an 'error' attribute of type errors.InvalidRange if the * - or an 'error' attribute of type errors.InvalidRange if the
* requested range is out of object's boundaries. * requested range is out of object's boundaries.
*/ */
function getByteRangeFromSpec(rangeSpec, objectSize) { export function getByteRangeFromSpec(rangeSpec, objectSize) {
if (rangeSpec.suffix !== undefined) { if (rangeSpec.suffix !== undefined) {
if (rangeSpec.suffix === 0) { if (rangeSpec.suffix === 0) {
// 0-byte suffix is always invalid (even on empty objects) // 0-byte suffix is always invalid (even on empty objects)
@ -72,8 +70,8 @@ function getByteRangeFromSpec(rangeSpec, objectSize) {
if (rangeSpec.start < objectSize) { if (rangeSpec.start < objectSize) {
// test is false if end is undefined // test is false if end is undefined
return { range: [rangeSpec.start, return { range: [rangeSpec.start,
(rangeSpec.end < objectSize ? (rangeSpec.end < objectSize ?
rangeSpec.end : objectSize - 1)] }; rangeSpec.end : objectSize - 1)] };
} }
return { error: errors.InvalidRange }; return { error: errors.InvalidRange };
} }
@ -95,7 +93,7 @@ function getByteRangeFromSpec(rangeSpec, objectSize) {
* - or an 'error' attribute instead of type errors.InvalidRange if * - or an 'error' attribute instead of type errors.InvalidRange if
* the requested range is out of object's boundaries. * the requested range is out of object's boundaries.
*/ */
function parseRange(rangeHeader, objectSize) { export function parseRange(rangeHeader, objectSize) {
const rangeSpec = parseRangeSpec(rangeHeader); const rangeSpec = parseRangeSpec(rangeHeader);
if (rangeSpec.error) { if (rangeSpec.error) {
// invalid range syntax is silently ignored in HTTP spec, // invalid range syntax is silently ignored in HTTP spec,
@ -105,15 +103,8 @@ function parseRange(rangeHeader, objectSize) {
return getByteRangeFromSpec(rangeSpec, objectSize); return getByteRangeFromSpec(rangeSpec, objectSize);
} }
function checkSupportIPv6() { export function checkSupportIPv6() {
const niList = os.networkInterfaces(); const niList = os.networkInterfaces();
return Object.keys(niList).some(network => return Object.keys(niList).some(network =>
niList[network].some(intfc => intfc.family === 'IPv6')); niList[network].some(intfc => intfc.family === 'IPv6'));
} }
module.exports = {
parseRangeSpec,
getByteRangeFromSpec,
parseRange,
checkSupportIPv6,
};
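
Worked examples for the exported range helpers, against a 1000-byte object; the results follow directly from the clamping logic shown in getByteRangeFromSpec. The import path is illustrative.

```
import { parseRange, parseRangeSpec } from './utils';

const objectSize = 1000;

// Header syntax parsing only: start and end are decoded from the header.
parseRangeSpec('bytes=0-499');          // { start: 0, end: 499 }

// Full parsing against the object size: the end is clamped to size - 1.
parseRange('bytes=500-', objectSize);   // { range: [500, 999] }
parseRange('bytes=0-1999', objectSize); // { range: [0, 999] }

// A start beyond the last byte cannot be satisfied.
parseRange('bytes=1000-1001', objectSize); // { error: errors.InvalidRange }
```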


@ -1,12 +1,8 @@
-'use strict'; // eslint-disable-line
-/* eslint new-cap: "off" */
-const async = require('async');
-const errors = require('../../errors');
-const TTLVCodec = require('./codec/ttlv.js');
-const TlsTransport = require('./transport/tls.js');
-const KMIP = require('.');
+import async from 'async';
+import errors from '../../errors';
+import TTLVCodec from './codec/ttlv';
+import TlsTransport from './transport/tls';
+import KMIP from '.';
const CRYPTOGRAPHIC_OBJECT_TYPE = 'Symmetric Key'; const CRYPTOGRAPHIC_OBJECT_TYPE = 'Symmetric Key';
const CRYPTOGRAPHIC_ALGORITHM = 'AES'; const CRYPTOGRAPHIC_ALGORITHM = 'AES';
@ -55,7 +51,12 @@ function _arsenalError(err) {
if (typeof err === 'string') { if (typeof err === 'string') {
return errors.InternalError return errors.InternalError
.customizeDescription(`${messagePrefix} ${err}`); .customizeDescription(`${messagePrefix} ${err}`);
-    } else if (err instanceof Error) {
+    } else if (
+        err instanceof Error ||
+        // INFO: The second part is here only for Jest, to remove when we'll be
+        // fully migrated to TS
+        (err && typeof err.message === 'string')
+    ) {
return errors.InternalError return errors.InternalError
.customizeDescription(`${messagePrefix} ${err.message}`); .customizeDescription(`${messagePrefix} ${err.message}`);
} }
@ -90,8 +91,8 @@ function _negotiateProtocolVersion(client, logger, cb) {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.error('KMIP::negotiateProtocolVersion', logger.error('KMIP::negotiateProtocolVersion',
{ error, { error,
vendorIdentification: client.vendorIdentification }); vendorIdentification: client.vendorIdentification });
return cb(error); return cb(error);
} }
const majorVersions = const majorVersions =
@ -102,8 +103,8 @@ function _negotiateProtocolVersion(client, logger, cb) {
majorVersions.length !== minorVersions.length) { majorVersions.length !== minorVersions.length) {
const error = _arsenalError('No suitable protocol version'); const error = _arsenalError('No suitable protocol version');
logger.error('KMIP::negotiateProtocolVersion', logger.error('KMIP::negotiateProtocolVersion',
{ error, { error,
vendorIdentification: client.vendorIdentification }); vendorIdentification: client.vendorIdentification });
return cb(error); return cb(error);
} }
client.kmip.changeProtocolVersion(majorVersions[0], minorVersions[0]); client.kmip.changeProtocolVersion(majorVersions[0], minorVersions[0]);
@ -126,8 +127,8 @@ function _mapExtensions(client, logger, cb) {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.error('KMIP::mapExtensions', logger.error('KMIP::mapExtensions',
{ error, { error,
vendorIdentification: client.vendorIdentification }); vendorIdentification: client.vendorIdentification });
return cb(error); return cb(error);
} }
const extensionNames = response.lookup(searchFilter.extensionName); const extensionNames = response.lookup(searchFilter.extensionName);
@ -135,8 +136,8 @@ function _mapExtensions(client, logger, cb) {
if (extensionNames.length !== extensionTags.length) { if (extensionNames.length !== extensionTags.length) {
const error = _arsenalError('Inconsistent extension list'); const error = _arsenalError('Inconsistent extension list');
logger.error('KMIP::mapExtensions', logger.error('KMIP::mapExtensions',
{ error, { error,
vendorIdentification: client.vendorIdentification }); vendorIdentification: client.vendorIdentification });
return cb(error); return cb(error);
} }
extensionNames.forEach((extensionName, idx) => { extensionNames.forEach((extensionName, idx) => {
@ -160,7 +161,7 @@ function _queryServerInformation(client, logger, cb) {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.warn('KMIP::queryServerInformation', logger.warn('KMIP::queryServerInformation',
{ error }); { error });
/* no error returned, caller can keep going */ /* no error returned, caller can keep going */
return cb(); return cb();
} }
@ -170,9 +171,9 @@ function _queryServerInformation(client, logger, cb) {
JSON.stringify(response.lookup(searchFilter.serverInformation)[0])); JSON.stringify(response.lookup(searchFilter.serverInformation)[0]));
logger.info('KMIP Server identified', logger.info('KMIP Server identified',
{ vendorIdentification: client.vendorIdentification, { vendorIdentification: client.vendorIdentification,
serverInformation: client.serverInformation, serverInformation: client.serverInformation,
negotiatedProtocolVersion: client.kmip.protocolVersion }); negotiatedProtocolVersion: client.kmip.protocolVersion });
return cb(); return cb();
}); });
} }
@ -196,8 +197,8 @@ function _queryOperationsAndObjects(client, logger, cb) {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.error('KMIP::queryOperationsAndObjects', logger.error('KMIP::queryOperationsAndObjects',
{ error, { error,
vendorIdentification: client.vendorIdentification }); vendorIdentification: client.vendorIdentification });
return cb(error); return cb(error);
} }
const supportedOperations = response.lookup(searchFilter.operation); const supportedOperations = response.lookup(searchFilter.operation);
@ -222,22 +223,21 @@ function _queryOperationsAndObjects(client, logger, cb) {
logger.warn('KMIP::queryOperationsAndObjects: ' + logger.warn('KMIP::queryOperationsAndObjects: ' +
'The KMIP Server announces that it ' + 'The KMIP Server announces that it ' +
'does not support all of the required features', 'does not support all of the required features',
{ vendorIdentification: client.vendorIdentification, { vendorIdentification: client.vendorIdentification,
serverInformation: client.serverInformation, serverInformation: client.serverInformation,
supportsEncrypt, supportsDecrypt, supportsEncrypt, supportsDecrypt,
supportsActivate, supportsRevoke, supportsActivate, supportsRevoke,
supportsCreate, supportsDestroy, supportsCreate, supportsDestroy,
supportsQuery, supportsSymmetricKeys }); supportsQuery, supportsSymmetricKeys });
} else { } else {
logger.info('KMIP Server provides the necessary feature set', logger.info('KMIP Server provides the necessary feature set',
{ vendorIdentification: client.vendorIdentification }); { vendorIdentification: client.vendorIdentification });
} }
return cb(); return cb();
}); });
} }
-class Client {
+export default class Client {
/** /**
* Construct a high level KMIP driver suitable for cloudserver * Construct a high level KMIP driver suitable for cloudserver
* @param {Object} options - Instance options * @param {Object} options - Instance options
@ -264,8 +264,8 @@ class Client {
this.vendorIdentification = ''; this.vendorIdentification = '';
this.serverInformation = []; this.serverInformation = [];
this.kmip = new KMIP(CodecClass || TTLVCodec, this.kmip = new KMIP(CodecClass || TTLVCodec,
TransportClass || TlsTransport, TransportClass || TlsTransport,
options); options);
this.kmip.registerHandshakeFunction((logger, cb) => { this.kmip.registerHandshakeFunction((logger, cb) => {
this._kmipHandshake(logger, cb); this._kmipHandshake(logger, cb);
}); });
@ -322,8 +322,8 @@ class Client {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.error('KMIP::_activateBucketKey', logger.error('KMIP::_activateBucketKey',
{ error, { error,
serverInformation: this.serverInformation }); serverInformation: this.serverInformation });
return cb(error); return cb(error);
} }
const uniqueIdentifier = const uniqueIdentifier =
@ -332,7 +332,7 @@ class Client {
const error = _arsenalError( const error = _arsenalError(
'Server did not return the expected identifier'); 'Server did not return the expected identifier');
logger.error('KMIP::cipherDataKey', logger.error('KMIP::cipherDataKey',
{ error, uniqueIdentifier }); { error, uniqueIdentifier });
return cb(error); return cb(error);
} }
return cb(null, keyIdentifier); return cb(null, keyIdentifier);
@ -351,20 +351,20 @@ class Client {
const attributes = []; const attributes = [];
if (!!this.options.bucketNameAttributeName) { if (!!this.options.bucketNameAttributeName) {
attributes.push(KMIP.Attribute('TextString', attributes.push(KMIP.Attribute('TextString',
this.options.bucketNameAttributeName, this.options.bucketNameAttributeName,
bucketName)); bucketName));
} }
attributes.push(...[ attributes.push(...[
KMIP.Attribute('Enumeration', 'Cryptographic Algorithm', KMIP.Attribute('Enumeration', 'Cryptographic Algorithm',
CRYPTOGRAPHIC_ALGORITHM), CRYPTOGRAPHIC_ALGORITHM),
KMIP.Attribute('Integer', 'Cryptographic Length', KMIP.Attribute('Integer', 'Cryptographic Length',
CRYPTOGRAPHIC_LENGTH), CRYPTOGRAPHIC_LENGTH),
KMIP.Attribute('Integer', 'Cryptographic Usage Mask', KMIP.Attribute('Integer', 'Cryptographic Usage Mask',
this.kmip.encodeMask('Cryptographic Usage Mask', this.kmip.encodeMask('Cryptographic Usage Mask',
CRYPTOGRAPHIC_USAGE_MASK))]); CRYPTOGRAPHIC_USAGE_MASK))]);
if (this.options.compoundCreateActivate) { if (this.options.compoundCreateActivate) {
attributes.push(KMIP.Attribute('Date-Time', 'Activation Date', attributes.push(KMIP.Attribute('Date-Time', 'Activation Date',
new Date(Date.UTC()))); new Date(Date.UTC())));
} }
return this.kmip.request(logger, 'Create', [ return this.kmip.request(logger, 'Create', [
@ -374,8 +374,8 @@ class Client {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.error('KMIP::createBucketKey', logger.error('KMIP::createBucketKey',
{ error, { error,
serverInformation: this.serverInformation }); serverInformation: this.serverInformation });
return cb(error); return cb(error);
} }
const createdObjectType = const createdObjectType =
@ -386,7 +386,7 @@ class Client {
const error = _arsenalError( const error = _arsenalError(
'Server created an object of wrong type'); 'Server created an object of wrong type');
logger.error('KMIP::createBucketKey', logger.error('KMIP::createBucketKey',
{ error, createdObjectType }); { error, createdObjectType });
return cb(error); return cb(error);
} }
if (!this.options.compoundCreateActivate) { if (!this.options.compoundCreateActivate) {
@ -411,16 +411,16 @@ class Client {
KMIP.TextString('Unique Identifier', bucketKeyId), KMIP.TextString('Unique Identifier', bucketKeyId),
KMIP.Structure('Revocation Reason', [ KMIP.Structure('Revocation Reason', [
KMIP.Enumeration('Revocation Reason Code', KMIP.Enumeration('Revocation Reason Code',
'Cessation of Operation'), 'Cessation of Operation'),
KMIP.TextString('Revocation Message', KMIP.TextString('Revocation Message',
'About to be deleted'), 'About to be deleted'),
]), ]),
], (err, response) => { ], (err, response) => {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.error('KMIP::_revokeBucketKey', logger.error('KMIP::_revokeBucketKey',
{ error, { error,
serverInformation: this.serverInformation }); serverInformation: this.serverInformation });
return cb(error); return cb(error);
} }
const uniqueIdentifier = const uniqueIdentifier =
@ -429,7 +429,7 @@ class Client {
const error = _arsenalError( const error = _arsenalError(
'Server did not return the expected identifier'); 'Server did not return the expected identifier');
logger.error('KMIP::_revokeBucketKey', logger.error('KMIP::_revokeBucketKey',
{ error, uniqueIdentifier }); { error, uniqueIdentifier });
return cb(error); return cb(error);
} }
return cb(); return cb();
@ -448,8 +448,8 @@ class Client {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.error('KMIP::destroyBucketKey: revocation failed', logger.error('KMIP::destroyBucketKey: revocation failed',
{ error, { error,
serverInformation: this.serverInformation }); serverInformation: this.serverInformation });
return cb(error); return cb(error);
} }
return this.kmip.request(logger, 'Destroy', [ return this.kmip.request(logger, 'Destroy', [
@ -458,8 +458,8 @@ class Client {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.error('KMIP::destroyBucketKey', logger.error('KMIP::destroyBucketKey',
{ error, { error,
serverInformation: this.serverInformation }); serverInformation: this.serverInformation });
return cb(error); return cb(error);
} }
const uniqueIdentifier = const uniqueIdentifier =
@ -468,7 +468,7 @@ class Client {
const error = _arsenalError( const error = _arsenalError(
'Server did not return the expected identifier'); 'Server did not return the expected identifier');
logger.error('KMIP::destroyBucketKey', logger.error('KMIP::destroyBucketKey',
{ error, uniqueIdentifier }); { error, uniqueIdentifier });
return cb(error); return cb(error);
} }
return cb(); return cb();
@ -487,19 +487,19 @@ class Client {
* @callback called with (err, cipheredDataKey: Buffer) * @callback called with (err, cipheredDataKey: Buffer)
*/ */
cipherDataKey(cryptoScheme, cipherDataKey(cryptoScheme,
masterKeyId, masterKeyId,
plainTextDataKey, plainTextDataKey,
logger, logger,
cb) { cb) {
return this.kmip.request(logger, 'Encrypt', [ return this.kmip.request(logger, 'Encrypt', [
KMIP.TextString('Unique Identifier', masterKeyId), KMIP.TextString('Unique Identifier', masterKeyId),
KMIP.Structure('Cryptographic Parameters', [ KMIP.Structure('Cryptographic Parameters', [
KMIP.Enumeration('Block Cipher Mode', KMIP.Enumeration('Block Cipher Mode',
CRYPTOGRAPHIC_CIPHER_MODE), CRYPTOGRAPHIC_CIPHER_MODE),
KMIP.Enumeration('Padding Method', KMIP.Enumeration('Padding Method',
CRYPTOGRAPHIC_PADDING_METHOD), CRYPTOGRAPHIC_PADDING_METHOD),
KMIP.Enumeration('Cryptographic Algorithm', KMIP.Enumeration('Cryptographic Algorithm',
CRYPTOGRAPHIC_ALGORITHM), CRYPTOGRAPHIC_ALGORITHM),
]), ]),
KMIP.ByteString('Data', plainTextDataKey), KMIP.ByteString('Data', plainTextDataKey),
KMIP.ByteString('IV/Counter/Nonce', CRYPTOGRAPHIC_DEFAULT_IV), KMIP.ByteString('IV/Counter/Nonce', CRYPTOGRAPHIC_DEFAULT_IV),
@ -507,8 +507,8 @@ class Client {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.error('KMIP::cipherDataKey', logger.error('KMIP::cipherDataKey',
{ error, { error,
serverInformation: this.serverInformation }); serverInformation: this.serverInformation });
return cb(error); return cb(error);
} }
const uniqueIdentifier = const uniqueIdentifier =
@ -518,7 +518,7 @@ class Client {
const error = _arsenalError( const error = _arsenalError(
'Server did not return the expected identifier'); 'Server did not return the expected identifier');
logger.error('KMIP::cipherDataKey', logger.error('KMIP::cipherDataKey',
{ error, uniqueIdentifier }); { error, uniqueIdentifier });
return cb(error); return cb(error);
} }
return cb(null, data); return cb(null, data);
@ -536,19 +536,19 @@ class Client {
* @callback called with (err, plainTextDataKey: Buffer) * @callback called with (err, plainTextDataKey: Buffer)
*/ */
decipherDataKey(cryptoScheme, decipherDataKey(cryptoScheme,
masterKeyId, masterKeyId,
cipheredDataKey, cipheredDataKey,
logger, logger,
cb) { cb) {
return this.kmip.request(logger, 'Decrypt', [ return this.kmip.request(logger, 'Decrypt', [
KMIP.TextString('Unique Identifier', masterKeyId), KMIP.TextString('Unique Identifier', masterKeyId),
KMIP.Structure('Cryptographic Parameters', [ KMIP.Structure('Cryptographic Parameters', [
KMIP.Enumeration('Block Cipher Mode', KMIP.Enumeration('Block Cipher Mode',
CRYPTOGRAPHIC_CIPHER_MODE), CRYPTOGRAPHIC_CIPHER_MODE),
KMIP.Enumeration('Padding Method', KMIP.Enumeration('Padding Method',
CRYPTOGRAPHIC_PADDING_METHOD), CRYPTOGRAPHIC_PADDING_METHOD),
KMIP.Enumeration('Cryptographic Algorithm', KMIP.Enumeration('Cryptographic Algorithm',
CRYPTOGRAPHIC_ALGORITHM), CRYPTOGRAPHIC_ALGORITHM),
]), ]),
KMIP.ByteString('Data', cipheredDataKey), KMIP.ByteString('Data', cipheredDataKey),
KMIP.ByteString('IV/Counter/Nonce', CRYPTOGRAPHIC_DEFAULT_IV), KMIP.ByteString('IV/Counter/Nonce', CRYPTOGRAPHIC_DEFAULT_IV),
@ -556,8 +556,8 @@ class Client {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.error('KMIP::decipherDataKey', logger.error('KMIP::decipherDataKey',
{ error, { error,
serverInformation: this.serverInformation }); serverInformation: this.serverInformation });
return cb(error); return cb(error);
} }
const uniqueIdentifier = const uniqueIdentifier =
@ -567,7 +567,7 @@ class Client {
const error = _arsenalError( const error = _arsenalError(
'Server did not return the right identifier'); 'Server did not return the right identifier');
logger.error('KMIP::decipherDataKey', logger.error('KMIP::decipherDataKey',
{ error, uniqueIdentifier }); { error, uniqueIdentifier });
return cb(error); return cb(error);
} }
return cb(null, data); return cb(null, data);
@ -599,5 +599,3 @@ class Client {
}); });
} }
} }
module.exports = Client;
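The KMIP `Client` hunks above all apply the same mechanical change: the trailing `module.exports = Client;` disappears and the class declaration gains `export default`. A minimal TypeScript sketch of that pattern, using placeholder names that are not taken from the repository:

```
// Before (CommonJS):
//     class ExampleClient { ... }
//     module.exports = ExampleClient;

// After (ES module syntax compiled by TypeScript):
export default class ExampleClient {
    // placeholder member so the example compiles
    connected = false;
}

// Call sites move from:
//     const ExampleClient = require('./ExampleClient');
// to:
//     import ExampleClient from './ExampleClient';
```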

@ -1,6 +1,4 @@
'use strict'; // eslint-disable-line import assert from 'assert';
const assert = require('assert');
function _lookup(decodedTTLV, path) { function _lookup(decodedTTLV, path) {
@ -31,7 +29,7 @@ function _lookup(decodedTTLV, path) {
return res; return res;
} }
class Message { export default class Message {
/** /**
* Construct a new abstract Message * Construct a new abstract Message
* @param {Object} content - the content of the message * @param {Object} content - the content of the message
@ -50,5 +48,3 @@ class Message {
return _lookup(this.content, path); return _lookup(this.content, path);
} }
} }
module.exports = Message;

@ -1,8 +1,5 @@
'use strict'; // eslint-disable-line import KMIPTags from '../tags.json';
/* eslint dot-notation: "off" */ import KMIPMessage from '../Message';
const KMIPTags = require('../tags.json');
const KMIPMessage = require('../Message.js');
const UINT32_MAX = Math.pow(2, 32); const UINT32_MAX = Math.pow(2, 32);
@ -26,7 +23,7 @@ function _throwError(logger, msg, data) {
throw Error(msg); throw Error(msg);
} }
function TTLVCodec() { export default function TTLVCodec() {
if (!new.target) { if (!new.target) {
return new TTLVCodec(); return new TTLVCodec();
} }
@ -55,15 +52,15 @@ function TTLVCodec() {
const property = {}; const property = {};
if (!TypeDecoder[elementType]) { if (!TypeDecoder[elementType]) {
_throwError(logger, _throwError(logger,
'Unknown element type', 'Unknown element type',
{ funcName, elementTag, elementType }); { funcName, elementTag, elementType });
} }
const elementValue = value.slice(i + 8, const elementValue = value.slice(i + 8,
i + 8 + elementLength); i + 8 + elementLength);
if (elementValue.length !== elementLength) { if (elementValue.length !== elementLength) {
_throwError(logger, 'BUG: Wrong buffer size', _throwError(logger, 'BUG: Wrong buffer size',
{ funcName, elementLength, { funcName, elementLength,
bufferLength: elementValue.length }); bufferLength: elementValue.length });
} }
property.type = TypeDecoder[elementType].name; property.type = TypeDecoder[elementType].name;
property.value = TypeDecoder[elementType] property.value = TypeDecoder[elementType]
@ -75,7 +72,7 @@ function TTLVCodec() {
const tagInfo = TagDecoder[elementTag]; const tagInfo = TagDecoder[elementTag];
if (!tagInfo) { if (!tagInfo) {
logger.debug('Unknown element tag', logger.debug('Unknown element tag',
{ funcName, elementTag }); { funcName, elementTag });
property.tag = elementTag; property.tag = elementTag;
element['Unknown Tag'] = property; element['Unknown Tag'] = property;
} else { } else {
@ -83,8 +80,8 @@ function TTLVCodec() {
if (tagInfo.name === 'Attribute Name') { if (tagInfo.name === 'Attribute Name') {
if (property.type !== 'TextString') { if (property.type !== 'TextString') {
_throwError(logger, _throwError(logger,
'Invalide type', 'Invalide type',
{ funcName, type: property.type }); { funcName, type: property.type });
} }
diversion = property.value; diversion = property.value;
} }
@ -114,8 +111,8 @@ function TTLVCodec() {
} }
const itemResult = const itemResult =
TypeEncoder[itemType].encode(itemTagName, TypeEncoder[itemType].encode(itemTagName,
itemValue, itemValue,
itemDiversion); itemDiversion);
encodedValue = encodedValue encodedValue = encodedValue
.concat(_ttlvPadVector(itemResult)); .concat(_ttlvPadVector(itemResult));
}); });
@ -133,9 +130,9 @@ function TTLVCodec() {
const fixedLength = 4; const fixedLength = 4;
if (fixedLength !== value.length) { if (fixedLength !== value.length) {
_throwError(logger, _throwError(logger,
'Length mismatch', 'Length mismatch',
{ funcName, fixedLength, { funcName, fixedLength,
bufferLength: value.length }); bufferLength: value.length });
} }
return value.readUInt32BE(0); return value.readUInt32BE(0);
}, },
@ -156,16 +153,16 @@ function TTLVCodec() {
const fixedLength = 8; const fixedLength = 8;
if (fixedLength !== value.length) { if (fixedLength !== value.length) {
_throwError(logger, _throwError(logger,
'Length mismatch', 'Length mismatch',
{ funcName, fixedLength, { funcName, fixedLength,
bufferLength: value.length }); bufferLength: value.length });
} }
const longUInt = UINT32_MAX * value.readUInt32BE(0) + const longUInt = UINT32_MAX * value.readUInt32BE(0) +
value.readUInt32BE(4); value.readUInt32BE(4);
if (longUInt > Number.MAX_SAFE_INTEGER) { if (longUInt > Number.MAX_SAFE_INTEGER) {
_throwError(logger, _throwError(logger,
'53-bit overflow', '53-bit overflow',
{ funcName, longUInt }); { funcName, longUInt });
} }
return longUInt; return longUInt;
}, },
@ -200,9 +197,9 @@ function TTLVCodec() {
const fixedLength = 4; const fixedLength = 4;
if (fixedLength !== value.length) { if (fixedLength !== value.length) {
_throwError(logger, _throwError(logger,
'Length mismatch', 'Length mismatch',
{ funcName, fixedLength, { funcName, fixedLength,
bufferLength: value.length }); bufferLength: value.length });
} }
const enumValue = value.toString('hex'); const enumValue = value.toString('hex');
const actualTag = diversion ? TagEncoder[diversion].value : tag; const actualTag = diversion ? TagEncoder[diversion].value : tag;
@ -211,10 +208,10 @@ function TTLVCodec() {
!enumInfo.enumeration || !enumInfo.enumeration ||
!enumInfo.enumeration[enumValue]) { !enumInfo.enumeration[enumValue]) {
return { tag, return { tag,
value: enumValue, value: enumValue,
message: 'Unknown enumeration value', message: 'Unknown enumeration value',
diversion, diversion,
}; };
} }
return enumInfo.enumeration[enumValue]; return enumInfo.enumeration[enumValue];
}, },
@ -227,7 +224,7 @@ function TTLVCodec() {
const actualTag = diversion || tagName; const actualTag = diversion || tagName;
const encodedValue = const encodedValue =
Buffer.from(TagEncoder[actualTag].enumeration[value], Buffer.from(TagEncoder[actualTag].enumeration[value],
'hex'); 'hex');
return _ttlvPadVector([tag, type, length, encodedValue]); return _ttlvPadVector([tag, type, length, encodedValue]);
}, },
}, },
@ -238,9 +235,9 @@ function TTLVCodec() {
const fixedLength = 8; const fixedLength = 8;
if (fixedLength !== value.length) { if (fixedLength !== value.length) {
_throwError(logger, _throwError(logger,
'Length mismatch', 'Length mismatch',
{ funcName, fixedLength, { funcName, fixedLength,
bufferLength: value.length }); bufferLength: value.length });
} }
const msUInt = value.readUInt32BE(0); const msUInt = value.readUInt32BE(0);
const lsUInt = value.readUInt32BE(4); const lsUInt = value.readUInt32BE(4);
@ -267,7 +264,7 @@ function TTLVCodec() {
const length = Buffer.alloc(4); const length = Buffer.alloc(4);
length.writeUInt32BE(value.length); length.writeUInt32BE(value.length);
return _ttlvPadVector([tag, type, length, return _ttlvPadVector([tag, type, length,
Buffer.from(value, 'utf8')]); Buffer.from(value, 'utf8')]);
}, },
}, },
'08': { '08': {
@ -289,17 +286,17 @@ function TTLVCodec() {
const fixedLength = 8; const fixedLength = 8;
if (fixedLength !== value.length) { if (fixedLength !== value.length) {
_throwError(logger, _throwError(logger,
'Length mismatch', 'Length mismatch',
{ funcName, fixedLength, { funcName, fixedLength,
bufferLength: value.length }); bufferLength: value.length });
} }
const d = new Date(0); const d = new Date(0);
const utcSeconds = UINT32_MAX * value.readUInt32BE(0) + const utcSeconds = UINT32_MAX * value.readUInt32BE(0) +
value.readUInt32BE(4); value.readUInt32BE(4);
if (utcSeconds > Number.MAX_SAFE_INTEGER) { if (utcSeconds > Number.MAX_SAFE_INTEGER) {
_throwError(logger, _throwError(logger,
'53-bit overflow', '53-bit overflow',
{ funcName, utcSeconds }); { funcName, utcSeconds });
} }
d.setUTCSeconds(utcSeconds); d.setUTCSeconds(utcSeconds);
return d; return d;
@ -323,9 +320,9 @@ function TTLVCodec() {
const fixedLength = 4; const fixedLength = 4;
if (fixedLength !== value.length) { if (fixedLength !== value.length) {
_throwError(logger, _throwError(logger,
'Length mismatch', 'Length mismatch',
{ funcName, fixedLength, { funcName, fixedLength,
bufferLength: value.length }); bufferLength: value.length });
} }
return value.readInt32BE(0); return value.readInt32BE(0);
}, },
@ -415,8 +412,8 @@ function TTLVCodec() {
throw Error(`Unknown Type '${type}'`); throw Error(`Unknown Type '${type}'`);
} }
const itemValue = TypeEncoder[type].encode(key, const itemValue = TypeEncoder[type].encode(key,
item[key].value, item[key].value,
item[key].diversion); item[key].diversion);
result = result.concat(_ttlvPadVector(itemValue)); result = result.concat(_ttlvPadVector(itemValue));
}); });
}); });
@ -430,5 +427,3 @@ function TTLVCodec() {
}; };
return this; return this;
} }
module.exports = TTLVCodec;

@ -1,9 +1,6 @@
'use strict'; // eslint-disable-line import uuidv4 from 'uuid/v4';
/* eslint new-cap: "off" */
const uuidv4 = require('uuid/v4'); import Message from './Message';
const Message = require('./Message.js');
/* This client requires at least a KMIP 1.2 compatible server */ /* This client requires at least a KMIP 1.2 compatible server */
const DEFAULT_PROTOCOL_VERSION_MAJOR = 1; const DEFAULT_PROTOCOL_VERSION_MAJOR = 1;
@ -22,7 +19,7 @@ function _PrimitiveType(tagName, type, value) {
return { [tagName]: { type, value } }; return { [tagName]: { type, value } };
} }
class KMIP { export default class KMIP {
/** /**
* Construct a new KMIP Object * Construct a new KMIP Object
* @param {Class} Codec - * @param {Class} Codec -
@ -275,11 +272,11 @@ class KMIP {
KMIP.Structure('Request Header', [ KMIP.Structure('Request Header', [
KMIP.Structure('Protocol Version', [ KMIP.Structure('Protocol Version', [
KMIP.Integer('Protocol Version Major', KMIP.Integer('Protocol Version Major',
this.protocolVersion.major), this.protocolVersion.major),
KMIP.Integer('Protocol Version Minor', KMIP.Integer('Protocol Version Minor',
this.protocolVersion.minor)]), this.protocolVersion.minor)]),
KMIP.Integer('Maximum Response Size', KMIP.Integer('Maximum Response Size',
this.maximumResponseSize), this.maximumResponseSize),
KMIP.Integer('Batch Count', 1)]), KMIP.Integer('Batch Count', 1)]),
KMIP.Structure('Batch Item', [ KMIP.Structure('Batch Item', [
KMIP.Enumeration('Operation', operation), KMIP.Enumeration('Operation', operation),
@ -292,7 +289,7 @@ class KMIP {
(err, conversation, rawResponse) => { (err, conversation, rawResponse) => {
if (err) { if (err) {
logger.error('KMIP::request: Failed to send message', logger.error('KMIP::request: Failed to send message',
{ error: err }); { error: err });
return cb(err); return cb(err);
} }
const response = this._decodeMessage(logger, rawResponse); const response = this._decodeMessage(logger, rawResponse);
@ -311,16 +308,16 @@ class KMIP {
this.transport.abortPipeline(conversation); this.transport.abortPipeline(conversation);
const error = Error('Invalid batch item ID returned'); const error = Error('Invalid batch item ID returned');
logger.error('KMIP::request: failed', logger.error('KMIP::request: failed',
{ resultUniqueBatchItemID, uuid, error }); { resultUniqueBatchItemID, uuid, error });
return cb(error); return cb(error);
} }
if (performedOperation !== operation) { if (performedOperation !== operation) {
this.transport.abortPipeline(conversation); this.transport.abortPipeline(conversation);
const error = Error('Operation mismatch', const error = Error('Operation mismatch',
{ got: performedOperation, { got: performedOperation,
expected: operation }); expected: operation });
logger.error('KMIP::request: Operation mismatch', logger.error('KMIP::request: Operation mismatch',
{ error }); { error });
return cb(error); return cb(error);
} }
if (resultStatus !== 'Success') { if (resultStatus !== 'Success') {
@ -331,20 +328,15 @@ class KMIP {
response.lookup( response.lookup(
'Response Message/Batch Item/Result Message')[0]; 'Response Message/Batch Item/Result Message')[0];
const error = Error('KMIP request failure', const error = Error('KMIP request failure',
{ resultStatus, { resultStatus,
resultReason, resultReason,
resultMessage }); resultMessage });
logger.error('KMIP::request: request failed', logger.error('KMIP::request: request failed',
{ error, resultStatus, { error, resultStatus,
resultReason, resultMessage }); resultReason, resultMessage });
return cb(error); return cb(error);
} }
return cb(null, response); return cb(null, response);
}); });
} }
} }
module.exports = KMIP;

@ -1,11 +1,9 @@
'use strict'; // eslint-disable-line import assert from 'assert';
const assert = require('assert');
const DEFAULT_PIPELINE_DEPTH = 8; const DEFAULT_PIPELINE_DEPTH = 8;
const DEFAULT_KMIP_PORT = 5696; const DEFAULT_KMIP_PORT = 5696;
class TransportTemplate { export default class TransportTemplate {
/** /**
* Construct a new object of the TransportTemplate class * Construct a new object of the TransportTemplate class
* @param {Object} channel - Typically the tls object * @param {Object} channel - Typically the tls object
@ -86,8 +84,8 @@ class TransportTemplate {
const deferedRequest = this.deferedRequests.shift(); const deferedRequest = this.deferedRequests.shift();
process.nextTick(() => { process.nextTick(() => {
this.send(logger, this.send(logger,
deferedRequest.encodedMessage, deferedRequest.encodedMessage,
deferedRequest.cb); deferedRequest.cb);
}); });
} else if (this.callbackPipeline.length === 0 && } else if (this.callbackPipeline.length === 0 &&
this.deferedRequests.length === 0 && this.deferedRequests.length === 0 &&
@ -170,5 +168,3 @@ class TransportTemplate {
conversation.end(); conversation.end();
} }
} }
module.exports = TransportTemplate;

@ -1,5 +1,3 @@
'use strict'; // eslint-disable-line
const tls = require('tls'); const tls = require('tls');
const TransportTemplate = require('./TransportTemplate.js'); const TransportTemplate = require('./TransportTemplate.js');
@ -9,4 +7,4 @@ class TlsTransport extends TransportTemplate {
} }
} }
module.exports = TlsTransport; module.exports = TlsTransport;

@ -1,14 +1,14 @@
const httpServer = require('../http/server'); import httpServer from '../http/server';
const werelogs = require('werelogs'); import werelogs from 'werelogs';
const errors = require('../../errors'); import errors from '../../errors';
const ZenkoMetrics = require('../../metrics/ZenkoMetrics'); import ZenkoMetrics from '../../metrics/ZenkoMetrics';
const { sendSuccess, sendError } = require('./Utils'); import { sendSuccess, sendError } from './Utils';
function checkStub(log) { // eslint-disable-line function checkStub(log) { // eslint-disable-line
return true; return true;
} }
class HealthProbeServer extends httpServer { export default class HealthProbeServer extends httpServer {
constructor(params) { constructor(params) {
const logging = new werelogs.Logger('HealthProbeServer'); const logging = new werelogs.Logger('HealthProbeServer');
super(params.port, logging); super(params.port, logging);
@ -72,5 +72,3 @@ class HealthProbeServer extends httpServer {
res.end(ZenkoMetrics.asPrometheus()); res.end(ZenkoMetrics.asPrometheus());
} }
} }
module.exports = HealthProbeServer;

@ -1,10 +1,10 @@
const httpServer = require('../http/server'); import httpServer from '../http/server';
const werelogs = require('werelogs'); import werelogs from 'werelogs';
const errors = require('../../errors'); import errors from '../../errors';
const DEFAULT_LIVE_ROUTE = '/_/live'; export const DEFAULT_LIVE_ROUTE = '/_/live';
const DEFAULT_READY_ROUTE = '/_/ready'; export const DEFAULT_READY_ROUTE = '/_/ready';
const DEFAULT_METRICS_ROUTE = '/metrics'; export const DEFAULT_METRICS_ROUTE = '/metrics';
/** /**
* ProbeDelegate is used to handle probe checks. * ProbeDelegate is used to handle probe checks.
@ -28,7 +28,7 @@ const DEFAULT_METRICS_ROUTE = '/metrics';
* *
* @extends {httpServer} * @extends {httpServer}
*/ */
class ProbeServer extends httpServer { export class ProbeServer extends httpServer {
/** /**
* Create a new ProbeServer with parameters * Create a new ProbeServer with parameters
* *
@ -92,10 +92,3 @@ class ProbeServer extends httpServer {
this._handlers.get(req.url)(res, log); this._handlers.get(req.url)(res, log);
} }
} }
module.exports = {
ProbeServer,
DEFAULT_LIVE_ROUTE,
DEFAULT_READY_ROUTE,
DEFAULT_METRICS_ROUTE,
};
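The hunk above replaces the single `module.exports = { ProbeServer, DEFAULT_LIVE_ROUTE, ... }` object with an `export` keyword on each constant and class, and consumers switch to named imports (as in the `import { sendSuccess, sendError } from './Utils'` line earlier). A hedged sketch of the pattern, with placeholder names and values:

```
// Hypothetical module; names and values are placeholders, not Arsenal code.
export const DEFAULT_EXAMPLE_ROUTE = '/_/example';

export class ExampleProbeServer {
    route = DEFAULT_EXAMPLE_ROUTE;
}

// Consumers move from:
//     const { ExampleProbeServer, DEFAULT_EXAMPLE_ROUTE } = require('./ExampleProbeServer');
// to:
//     import { ExampleProbeServer, DEFAULT_EXAMPLE_ROUTE } from './ExampleProbeServer';
```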

@ -5,7 +5,7 @@
* @param {string} [message] - Message to send as response, defaults to OK * @param {string} [message] - Message to send as response, defaults to OK
* @returns {undefined} * @returns {undefined}
*/ */
function sendSuccess(res, log, message = 'OK') { export function sendSuccess(res, log, message = 'OK') {
log.debug('replying with success'); log.debug('replying with success');
res.writeHead(200); res.writeHead(200);
res.end(message); res.end(message);
@ -19,14 +19,14 @@ function sendSuccess(res, log, message = 'OK') {
* @param {string} [optMessage] - Message to use instead of the errors message * @param {string} [optMessage] - Message to use instead of the errors message
* @returns {undefined} * @returns {undefined}
*/ */
function sendError(res, log, error, optMessage) { export function sendError(res, log, error, optMessage) {
const message = optMessage || error.description || ''; const message = optMessage || error.description || '';
log.debug('sending back error response', log.debug('sending back error response',
{ {
httpCode: error.code, httpCode: error.code,
errorType: error.message, errorType: error.message,
error: message, error: message,
} },
); );
res.writeHead(error.code); res.writeHead(error.code);
res.end(JSON.stringify({ res.end(JSON.stringify({
@ -34,8 +34,3 @@ function sendError(res, log, error, optMessage) {
errorMessage: message, errorMessage: message,
})); }));
} }
module.exports = {
sendSuccess,
sendError,
};

@ -1,12 +1,10 @@
'use strict'; // eslint-disable-line import assert from 'assert';
import http from 'http';
import werelogs from 'werelogs';
const assert = require('assert'); import * as constants from '../../constants';
const http = require('http'); import * as utils from './utils';
const werelogs = require('werelogs'); import errors from '../../errors';
const constants = require('../../constants');
const utils = require('./utils');
const errors = require('../../errors');
const HttpAgent = require('agentkeepalive'); const HttpAgent = require('agentkeepalive');
@ -64,7 +62,7 @@ function makeErrorFromHTTPResponse(response) {
* *
* The API is usable when the object is constructed. * The API is usable when the object is constructed.
*/ */
class RESTClient { export default class RESTClient {
/** /**
* Interface to the data file server * Interface to the data file server
* @constructor * @constructor
@ -311,5 +309,3 @@ class RESTClient {
* @callback RESTClient~deleteCallback * @callback RESTClient~deleteCallback
* @param {Error} - The encountered error * @param {Error} - The encountered error
*/ */
module.exports = RESTClient;
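The import hunks above mix two forms: modules that exported a single value are pulled in with a default import (`import errors from '../../errors'`), while modules consumed as a bag of members keep their call sites intact through a namespace import (`import * as constants from '../../constants'`). A small illustrative sketch, with placeholder names and values:

```
// Hypothetical './exampleConstants' module (placeholder content):
export const DEFAULT_EXAMPLE_PORT = 5696;
export const DEFAULT_EXAMPLE_ROUTE = '/_/example';

// A consumer then picks the import form that matches its existing call sites:
//     import * as constants from './exampleConstants';           // keeps `constants.DEFAULT_EXAMPLE_PORT`
//     import { DEFAULT_EXAMPLE_PORT } from './exampleConstants'; // cherry-picks a named export
```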

@ -1,15 +1,13 @@
'use strict'; // eslint-disable-line import assert from 'assert';
import url from 'url';
const assert = require('assert'); import werelogs from 'werelogs';
const url = require('url');
const werelogs = require('werelogs'); import httpServer from '../http/server';
import * as constants from '../../constants';
const httpServer = require('../http/server'); import { parseURL } from './utils';
const constants = require('../../constants'); import * as httpUtils from '../http/utils';
const { parseURL } = require('./utils'); import errors from '../../errors';
const httpUtils = require('../http/utils');
const errors = require('../../errors');
function setContentLength(response, contentLength) { function setContentLength(response, contentLength) {
response.setHeader('Content-Length', contentLength.toString()); response.setHeader('Content-Length', contentLength.toString());
@ -19,7 +17,7 @@ function setContentRange(response, byteRange, objectSize) {
const [start, end] = byteRange; const [start, end] = byteRange;
assert(start !== undefined && end !== undefined); assert(start !== undefined && end !== undefined);
response.setHeader('Content-Range', response.setHeader('Content-Range',
`bytes ${start}-${end}/${objectSize}`); `bytes ${start}-${end}/${objectSize}`);
} }
function sendError(res, log, error, optMessage) { function sendError(res, log, error, optMessage) {
@ -44,8 +42,7 @@ function sendError(res, log, error, optMessage) {
* You have to call setup() to initialize the storage backend, then * You have to call setup() to initialize the storage backend, then
* start() to start listening to the configured port. * start() to start listening to the configured port.
*/ */
class RESTServer extends httpServer { export default class RESTServer extends httpServer {
/** /**
* @constructor * @constructor
* @param {Object} params - constructor params * @param {Object} params - constructor params
@ -227,7 +224,7 @@ class RESTServer extends httpServer {
return sendError(res, log, err); return sendError(res, log, err);
} }
log.debug('sending back 200/206 response with contents', log.debug('sending back 200/206 response with contents',
{ key: pathInfo.key }); { key: pathInfo.key });
setContentLength(res, contentLength); setContentLength(res, contentLength);
res.setHeader('Accept-Ranges', 'bytes'); res.setHeader('Accept-Ranges', 'bytes');
if (byteRange) { if (byteRange) {
@ -265,7 +262,7 @@ class RESTServer extends httpServer {
return sendError(res, log, err); return sendError(res, log, err);
} }
log.debug('sending back 204 response to DELETE', log.debug('sending back 204 response to DELETE',
{ key: pathInfo.key }); { key: pathInfo.key });
res.writeHead(204); res.writeHead(204);
return res.end(() => { return res.end(() => {
log.debug('DELETE response sent', { key: pathInfo.key }); log.debug('DELETE response sent', { key: pathInfo.key });
@ -274,5 +271,3 @@ class RESTServer extends httpServer {
return undefined; return undefined;
} }
} }
module.exports = RESTServer;

@ -1,12 +1,10 @@
'use strict'; // eslint-disable-line import errors from '../../errors';
import * as constants from '../../constants';
const errors = require('../../errors'); import * as url from 'url';
const constants = require('../../constants');
const url = require('url');
const passthroughPrefixLength = constants.passthroughFileURL.length; const passthroughPrefixLength = constants.passthroughFileURL.length;
function explodePath(path) { export function explodePath(path) {
if (path.startsWith(constants.passthroughFileURL)) { if (path.startsWith(constants.passthroughFileURL)) {
const key = path.slice(passthroughPrefixLength + 1); const key = path.slice(passthroughPrefixLength + 1);
return { return {
@ -19,7 +17,7 @@ function explodePath(path) {
return { return {
service: pathMatch[1], service: pathMatch[1],
key: (pathMatch[3] !== undefined && pathMatch[3].length > 0 ? key: (pathMatch[3] !== undefined && pathMatch[3].length > 0 ?
pathMatch[3] : undefined), pathMatch[3] : undefined),
}; };
} }
throw errors.InvalidURI.customizeDescription('malformed URI'); throw errors.InvalidURI.customizeDescription('malformed URI');
@ -37,7 +35,7 @@ function explodePath(path) {
* - pathInfo.service {String} - The name of REST service ("DataFile") * - pathInfo.service {String} - The name of REST service ("DataFile")
* - pathInfo.key {String} - The requested key * - pathInfo.key {String} - The requested key
*/ */
function parseURL(urlStr, expectKey) { export function parseURL(urlStr, expectKey) {
const urlObj = url.parse(urlStr); const urlObj = url.parse(urlStr);
const pathInfo = explodePath(decodeURI(urlObj.path)); const pathInfo = explodePath(decodeURI(urlObj.path));
if ((pathInfo.service !== constants.dataFileURL) if ((pathInfo.service !== constants.dataFileURL)
@ -61,8 +59,3 @@ function parseURL(urlStr, expectKey) {
} }
return pathInfo; return pathInfo;
} }
module.exports = {
explodePath,
parseURL,
};

@ -1,8 +1,6 @@
'use strict'; // eslint-disable-line import assert from 'assert';
const assert = require('assert'); import * as rpc from './rpc';
const rpc = require('./rpc.js');
/** /**
* @class * @class
@ -17,7 +15,6 @@ const rpc = require('./rpc.js');
* RPC client object accessing the sub-level transparently. * RPC client object accessing the sub-level transparently.
*/ */
class LevelDbClient extends rpc.BaseClient { class LevelDbClient extends rpc.BaseClient {
/** /**
* @constructor * @constructor
* *
@ -78,7 +75,6 @@ class LevelDbClient extends rpc.BaseClient {
* env.subDb (env is passed as first parameter of received RPC calls). * env.subDb (env is passed as first parameter of received RPC calls).
*/ */
class LevelDbService extends rpc.BaseService { class LevelDbService extends rpc.BaseService {
/** /**
* @constructor * @constructor
* *

@ -1,17 +1,14 @@
'use strict'; // eslint-disable-line import http from 'http';
import io from 'socket.io';
import ioClient from 'socket.io-client';
import * as sioStream from './sio-stream';
import async from 'async';
import assert from 'assert';
import { EventEmitter } from 'events';
const http = require('http'); import { flattenError, reconstructError } from './utils';
const io = require('socket.io'); import errors from '../../errors';
const ioClient = require('socket.io-client'); import * as jsutil from '../../jsutil';
const sioStream = require('./sio-stream');
const async = require('async');
const assert = require('assert');
const EventEmitter = require('events').EventEmitter;
const flattenError = require('./utils').flattenError;
const reconstructError = require('./utils').reconstructError;
const errors = require('../../errors');
const jsutil = require('../../jsutil');
const DEFAULT_CALL_TIMEOUT_MS = 30000; const DEFAULT_CALL_TIMEOUT_MS = 30000;
@ -36,8 +33,7 @@ let streamRPCJSONObj;
* - the return value is passed as callback's second argument (unless * - the return value is passed as callback's second argument (unless
* an error occurred). * an error occurred).
*/ */
class BaseClient extends EventEmitter { export class BaseClient extends EventEmitter {
/** /**
* @constructor * @constructor
* *
@ -54,7 +50,7 @@ class BaseClient extends EventEmitter {
*/ */
constructor(params) { constructor(params) {
const { url, logger, callTimeoutMs, const { url, logger, callTimeoutMs,
streamMaxPendingAck, streamAckTimeoutMs } = params; streamMaxPendingAck, streamAckTimeoutMs } = params;
assert(url); assert(url);
assert(logger); assert(logger);
@ -82,11 +78,11 @@ class BaseClient extends EventEmitter {
_call(remoteCall, args, cb) { _call(remoteCall, args, cb) {
const wrapCb = (err, data) => { const wrapCb = (err, data) => {
cb(reconstructError(err), cb(reconstructError(err),
this.socketStreams.decodeStreams(data)); this.socketStreams.decodeStreams(data));
}; };
this.logger.debug('remote call', { remoteCall, args }); this.logger.debug('remote call', { remoteCall, args });
this.socket.emit('call', remoteCall, this.socket.emit('call', remoteCall,
this.socketStreams.encodeStreams(args), wrapCb); this.socketStreams.encodeStreams(args), wrapCb);
return undefined; return undefined;
} }
@ -113,8 +109,8 @@ class BaseClient extends EventEmitter {
throw new Error(`argument cb=${cb} is not a callback`); throw new Error(`argument cb=${cb} is not a callback`);
} }
async.timeout(this._call.bind(this), timeoutMs, async.timeout(this._call.bind(this), timeoutMs,
`operation ${remoteCall} timed out`)(remoteCall, `operation ${remoteCall} timed out`)(remoteCall,
args, cb); args, cb);
return undefined; return undefined;
} }
@ -142,7 +138,7 @@ class BaseClient extends EventEmitter {
const url = this.url; const url = this.url;
this.socket.on('error', err => { this.socket.on('error', err => {
this.logger.warn('connectivity error to the RPC service', this.logger.warn('connectivity error to the RPC service',
{ url, error: err }); { url, error: err });
}); });
this.socket.on('connect', () => { this.socket.on('connect', () => {
this.emit('connect'); this.emit('connect');
@ -156,7 +152,7 @@ class BaseClient extends EventEmitter {
this.getManifest((err, manifest) => { this.getManifest((err, manifest) => {
if (err) { if (err) {
this.logger.error('Error fetching manifest from RPC server', this.logger.error('Error fetching manifest from RPC server',
{ error: err }); { error: err });
} else { } else {
manifest.api.forEach(apiItem => { manifest.api.forEach(apiItem => {
this.createCall(apiItem.name); this.createCall(apiItem.name);
@ -250,8 +246,7 @@ class BaseClient extends EventEmitter {
* method. * method.
* *
*/ */
class BaseService { export class BaseService {
/** /**
* @constructor * @constructor
* *
@ -469,7 +464,8 @@ class BaseService {
* @return {Object} a server object, not yet listening on a TCP port * @return {Object} a server object, not yet listening on a TCP port
* (you must call listen(port) on the returned object) * (you must call listen(port) on the returned object)
*/ */
function RPCServer(params) {
export function RPCServer(params) {
assert(params.logger); assert(params.logger);
const httpServer = http.createServer(); const httpServer = http.createServer();
@ -497,7 +493,7 @@ function RPCServer(params) {
conn.on('error', err => { conn.on('error', err => {
log.error('error on socket.io connection', log.error('error on socket.io connection',
{ namespace: service.namespace, error: err }); { namespace: service.namespace, error: err });
}); });
conn.on('call', (remoteCall, args, cb) => { conn.on('call', (remoteCall, args, cb) => {
const decodedArgs = streamsSocket.decodeStreams(args); const decodedArgs = streamsSocket.decodeStreams(args);
@ -647,8 +643,8 @@ streamRPCJSONObj = function _streamRPCJSONObj(obj, wstream, cb) {
// primitive types // primitive types
if (obj === undefined) { if (obj === undefined) {
wstream.write('null'); // if undefined elements are present in wstream.write('null'); // if undefined elements are present in
// arrays, convert them to JSON null // arrays, convert them to JSON null
// objects // objects
} else { } else {
wstream.write(JSON.stringify(obj)); wstream.write(JSON.stringify(obj));
} }
@ -668,7 +664,7 @@ streamRPCJSONObj = function _streamRPCJSONObj(obj, wstream, cb) {
* @return {Object} a HTTP server object, not yet listening on a TCP * @return {Object} a HTTP server object, not yet listening on a TCP
* port (you must call listen(port) on the returned object) * port (you must call listen(port) on the returned object)
*/ */
function RESTServer(params) { export function RESTServer(params) {
assert(params); assert(params);
assert(params.logger); assert(params.logger);
const httpServer = http.createServer((req, res) => { const httpServer = http.createServer((req, res) => {
@ -739,11 +735,3 @@ function RESTServer(params) {
return httpServer; return httpServer;
} }
module.exports = {
BaseClient,
BaseService,
RPCServer,
RESTServer,
};

View File

@ -1,13 +1,11 @@
'use strict'; // eslint-disable-line import uuid from 'uuid';
import stream from 'stream';
import debug_ from 'debug';
import assert from 'assert';
import async from 'async';
import { flattenError, reconstructError } from './utils';
const debug = debug_('sio-stream');
const uuid = require('uuid');
const stream = require('stream');
const debug = require('debug')('sio-stream');
const assert = require('assert');
const async = require('async');
const flattenError = require('./utils').flattenError;
const reconstructError = require('./utils').reconstructError;
const DEFAULT_MAX_PENDING_ACK = 4; const DEFAULT_MAX_PENDING_ACK = 4;
const DEFAULT_ACK_TIMEOUT_MS = 5000; const DEFAULT_ACK_TIMEOUT_MS = 5000;
@ -16,7 +14,7 @@ class SIOOutputStream extends stream.Writable {
constructor(socket, streamId, maxPendingAck, ackTimeoutMs) { constructor(socket, streamId, maxPendingAck, ackTimeoutMs) {
super({ objectMode: true }); super({ objectMode: true });
this._initOutputStream(socket, streamId, maxPendingAck, this._initOutputStream(socket, streamId, maxPendingAck,
ackTimeoutMs); ackTimeoutMs);
} }
_initOutputStream(socket, streamId, maxPendingAck, ackTimeoutMs) { _initOutputStream(socket, streamId, maxPendingAck, ackTimeoutMs) {
@ -194,7 +192,7 @@ class SIOStreamSocket {
this.socket.on('stream-data', (payload, cb) => { this.socket.on('stream-data', (payload, cb) => {
const { streamId, data } = payload; const { streamId, data } = payload;
log.debug('received \'stream-data\' event', log.debug('received \'stream-data\' event',
{ streamId, size: data.length }); { streamId, size: data.length });
const stream = this.remoteStreams[streamId]; const stream = this.remoteStreams[streamId];
if (!stream) { if (!stream) {
log.debug('no such remote stream registered', { streamId }); log.debug('no such remote stream registered', { streamId });
@ -280,15 +278,15 @@ class SIOStreamSocket {
let transportStream; let transportStream;
if (isReadStream) { if (isReadStream) {
transportStream = new SIOOutputStream(this, streamId, transportStream = new SIOOutputStream(this, streamId,
this.maxPendingAck, this.maxPendingAck,
this.ackTimeoutMs); this.ackTimeoutMs);
} else { } else {
transportStream = new SIOInputStream(this, streamId); transportStream = new SIOInputStream(this, streamId);
} }
this.localStreams[streamId] = arg; this.localStreams[streamId] = arg;
arg.once('close', () => { arg.once('close', () => {
log.debug('stream closed, removing from local streams', log.debug('stream closed, removing from local streams',
{ streamId }); { streamId });
delete this.localStreams[streamId]; delete this.localStreams[streamId];
}); });
arg.on('error', error => { arg.on('error', error => {
@ -350,8 +348,8 @@ class SIOStreamSocket {
stream = new SIOInputStream(this, streamId); stream = new SIOInputStream(this, streamId);
} else if (arg.writable) { } else if (arg.writable) {
stream = new SIOOutputStream(this, streamId, stream = new SIOOutputStream(this, streamId,
this.maxPendingAck, this.maxPendingAck,
this.ackTimeoutMs); this.ackTimeoutMs);
} else { } else {
throw new Error('can\'t decode stream neither readable ' + throw new Error('can\'t decode stream neither readable ' +
'nor writable'); 'nor writable');
@ -360,14 +358,14 @@ class SIOStreamSocket {
if (arg.readable) { if (arg.readable) {
stream.once('close', () => { stream.once('close', () => {
log.debug('stream closed, removing from remote streams', log.debug('stream closed, removing from remote streams',
{ streamId }); { streamId });
delete this.remoteStreams[streamId]; delete this.remoteStreams[streamId];
}); });
} }
if (arg.writable) { if (arg.writable) {
stream.once('finish', () => { stream.once('finish', () => {
log.debug('stream finished, removing from remote streams', log.debug('stream finished, removing from remote streams',
{ streamId }); { streamId });
delete this.remoteStreams[streamId]; delete this.remoteStreams[streamId];
}); });
} }
@ -399,7 +397,7 @@ class SIOStreamSocket {
_write(streamId, data, cb) { _write(streamId, data, cb) {
this.logger.debug('emit \'stream-data\' event', this.logger.debug('emit \'stream-data\' event',
{ streamId, size: data.length }); { streamId, size: data.length });
this.socket.emit('stream-data', { streamId, data }, cb); this.socket.emit('stream-data', { streamId, data }, cb);
} }
@ -433,7 +431,7 @@ class SIOStreamSocket {
} }
} }
module.exports.createSocket = function createSocket( export function createSocket(
socket, socket,
logger, logger,
maxPendingAck = DEFAULT_MAX_PENDING_ACK, maxPendingAck = DEFAULT_MAX_PENDING_ACK,
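One conversion in the sio-stream hunk above cannot be a one-to-one rewrite: the old `const debug = require('debug')('sio-stream');` calls the required factory inline, so the ESM version imports the factory first and invokes it on a separate line. A sketch of that pattern (the `debug` package and the namespace string are the ones shown in the diff; the log call is only an illustration):

```
import debug_ from 'debug';

// The factory can no longer be invoked directly on the require,
// so the namespace is created in a second step.
const debug = debug_('sio-stream');

debug('example message through the sio-stream namespace');
```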

@ -1,5 +1,3 @@
'use strict'; // eslint-disable-line
/** /**
* @brief turn all <tt>err</tt> own and prototype attributes into own attributes * @brief turn all <tt>err</tt> own and prototype attributes into own attributes
* *
@ -9,7 +7,7 @@
* @param {Error} err error object * @param {Error} err error object
* @return {Object} flattened object containing <tt>err</tt> attributes * @return {Object} flattened object containing <tt>err</tt> attributes
*/ */
module.exports.flattenError = function flattenError(err) { export function flattenError(err) {
if (!err) { if (!err) {
return err; return err;
} }
@ -35,7 +33,7 @@ module.exports.flattenError = function flattenError(err) {
* @return {Error} a reconstructed Error object inheriting <tt>err</tt> * @return {Error} a reconstructed Error object inheriting <tt>err</tt>
* attributes * attributes
*/ */
module.exports.reconstructError = function reconstructError(err) { export function reconstructError(err) {
if (!err) { if (!err) {
return err; return err;
} }

@ -1,9 +1,7 @@
'use strict'; // eslint-disable-line strict import { URL } from 'url';
import { decryptSecret } from '../executables/pensieveCreds/utils';
const { URL } = require('url'); export function patchLocations(overlayLocations, creds, log) {
const { decryptSecret } = require('../executables/pensieveCreds/utils');
function patchLocations(overlayLocations, creds, log) {
if (!overlayLocations) { if (!overlayLocations) {
return {}; return {};
} }
@ -153,7 +151,3 @@ function patchLocations(overlayLocations, creds, log) {
}); });
return locations; return locations;
} }
module.exports = {
patchLocations,
};

@ -1,9 +1,7 @@
'use strict'; // eslint-disable-line strict import Ajv from 'ajv';
import userPolicySchema from './userPolicySchema.json';
const Ajv = require('ajv'); import resourcePolicySchema from './resourcePolicySchema.json';
const userPolicySchema = require('./userPolicySchema'); import errors from '../errors';
const resourcePolicySchema = require('./resourcePolicySchema');
const errors = require('../errors');
const ajValidate = new Ajv({ allErrors: true }); const ajValidate = new Ajv({ allErrors: true });
ajValidate.addMetaSchema(require('ajv/lib/refs/json-schema-draft-06.json')); ajValidate.addMetaSchema(require('ajv/lib/refs/json-schema-draft-06.json'));
@ -117,7 +115,7 @@ function _validatePolicy(type, policy) {
* @returns {Object} - returns object with properties error and value * @returns {Object} - returns object with properties error and value
* @returns {ValidationResult} - result of the validation * @returns {ValidationResult} - result of the validation
*/ */
function validateUserPolicy(policy) { export function validateUserPolicy(policy) {
return _validatePolicy('user', policy); return _validatePolicy('user', policy);
} }
@ -127,11 +125,6 @@ function validateUserPolicy(policy) {
* @returns {Object} - returns object with properties error and value * @returns {Object} - returns object with properties error and value
* @returns {ValidationResult} - result of the validation * @returns {ValidationResult} - result of the validation
*/ */
function validateResourcePolicy(policy) { export function validateResourcePolicy(policy) {
return _validatePolicy('resource', policy); return _validatePolicy('resource', policy);
} }
module.exports = {
validateUserPolicy,
validateResourcePolicy,
};

@ -1,19 +1,17 @@
'use strict'; // eslint-disable-line strict import { parseIp } from '../ipCheck';
const parseIp = require('../ipCheck').parseIp;
// http://docs.aws.amazon.com/IAM/latest/UserGuide/list_s3.html // http://docs.aws.amazon.com/IAM/latest/UserGuide/list_s3.html
// For MPU actions: // For MPU actions:
// http://docs.aws.amazon.com/AmazonS3/latest/dev/mpuAndPermissions.html // http://docs.aws.amazon.com/AmazonS3/latest/dev/mpuAndPermissions.html
// For bucket head and object head: // For bucket head and object head:
// http://docs.aws.amazon.com/AmazonS3/latest/dev/ // http://docs.aws.amazon.com/AmazonS3/latest/dev/
// using-with-s3-actions.html // using-with-s3-actions.html
const { import {
actionMapRQ, actionMapRQ,
actionMapIAM, actionMapIAM,
actionMapSSO, actionMapSSO,
actionMapSTS, actionMapSTS,
actionMapMetadata, actionMapMetadata,
} = require('./utils/actionMaps'); } from './utils/actionMaps';
const _actionNeedQuotaCheck = { const _actionNeedQuotaCheck = {
objectPut: true, objectPut: true,
@ -129,7 +127,7 @@ function _buildArn(service, generalResource, specificResource, requesterInfo) {
* @return {RequestContext} a RequestContext instance * @return {RequestContext} a RequestContext instance
*/ */
class RequestContext { export default class RequestContext {
constructor(headers, query, generalResource, specificResource, constructor(headers, query, generalResource, specificResource,
requesterIp, sslEnabled, apiMethod, requesterIp, sslEnabled, apiMethod,
awsService, locationConstraint, requesterInfo, awsService, locationConstraint, requesterInfo,
@ -648,5 +646,3 @@ class RequestContext {
return this._needTagEval; return this._needTagEval;
} }
} }
module.exports = RequestContext;

@ -1,14 +1,8 @@
'use strict'; // eslint-disable-line strict import { substituteVariables } from './utils/variables';
import { handleWildcards } from './utils/wildcards';
const substituteVariables = require('./utils/variables.js'); import { findConditionKey, convertConditionOperator } from './utils/conditions.js';
const handleWildcards = require('./utils/wildcards.js').handleWildcards; import checkArnMatch from './utils/checkArnMatch.js';
const conditions = require('./utils/conditions.js'); import { transformTagKeyValue } from './utils/objectTags';
const findConditionKey = conditions.findConditionKey;
const convertConditionOperator = conditions.convertConditionOperator;
const checkArnMatch = require('./utils/checkArnMatch.js');
const { transformTagKeyValue } = require('./utils/objectTags');
const evaluators = {};
const operatorsWithVariables = ['StringEquals', 'StringNotEquals', const operatorsWithVariables = ['StringEquals', 'StringNotEquals',
'StringEqualsIgnoreCase', 'StringNotEqualsIgnoreCase', 'StringEqualsIgnoreCase', 'StringNotEqualsIgnoreCase',
@ -28,7 +22,7 @@ const tagConditions = new Set(['s3:ExistingObjectTag', 's3:RequestObjectTagKey',
* @param {object} log - logger * @param {object} log - logger
* @return {boolean} true if applicable, false if not * @return {boolean} true if applicable, false if not
*/ */
evaluators.isResourceApplicable = (requestContext, statementResource, log) => { export function isResourceApplicable(requestContext, statementResource, log) {
const resource = requestContext.getResource(); const resource = requestContext.getResource();
if (!Array.isArray(statementResource)) { if (!Array.isArray(statementResource)) {
// eslint-disable-next-line no-param-reassign // eslint-disable-next-line no-param-reassign
@ -50,7 +44,7 @@ evaluators.isResourceApplicable = (requestContext, statementResource, log) => {
requestResourceArr, true); requestResourceArr, true);
if (arnSegmentsMatch) { if (arnSegmentsMatch) {
log.trace('policy resource is applicable to request', log.trace('policy resource is applicable to request',
{ requestResource: resource, policyResource }); { requestResource: resource, policyResource });
return true; return true;
} }
continue; continue;
@ -69,7 +63,7 @@ evaluators.isResourceApplicable = (requestContext, statementResource, log) => {
* @param {Object} log - logger * @param {Object} log - logger
* @return {boolean} true if applicable, false if not * @return {boolean} true if applicable, false if not
*/ */
evaluators.isActionApplicable = (requestAction, statementAction, log) => { export function isActionApplicable(requestAction, statementAction, log) {
if (!Array.isArray(statementAction)) { if (!Array.isArray(statementAction)) {
// eslint-disable-next-line no-param-reassign // eslint-disable-next-line no-param-reassign
statementAction = [statementAction]; statementAction = [statementAction];
@ -101,7 +95,7 @@ evaluators.isActionApplicable = (requestAction, statementAction, log) => {
* @return {Object} contains whether conditions are allowed and whether they * @return {Object} contains whether conditions are allowed and whether they
* contain any tag condition keys * contain any tag condition keys
*/ */
evaluators.meetConditions = (requestContext, statementCondition, log) => { export function meetConditions(requestContext, statementCondition, log) {
// The Condition portion of a policy is an object with different // The Condition portion of a policy is an object with different
// operators as keys // operators as keys
const conditionEval = {}; const conditionEval = {};
@ -204,7 +198,7 @@ evaluators.meetConditions = (requestContext, statementCondition, log) => {
* @return {string} Allow if permitted, Deny if not permitted or Neutral * @return {string} Allow if permitted, Deny if not permitted or Neutral
* if not applicable * if not applicable
*/ */
evaluators.evaluatePolicy = (requestContext, policy, log) => { export function evaluatePolicy(requestContext, policy, log) {
// TODO: For bucket policies need to add Principal evaluation // TODO: For bucket policies need to add Principal evaluation
     let verdict = 'Neutral';
@@ -216,33 +210,33 @@ evaluators.evaluatePolicy = (requestContext, policy, log) => {
         const currentStatement = policy.Statement[i];
         // If affirmative resource is in policy and request resource is
         // not applicable, move on to next statement
-        if (currentStatement.Resource && !evaluators.isResourceApplicable(requestContext,
+        if (currentStatement.Resource && !isResourceApplicable(requestContext,
             currentStatement.Resource, log)) {
             continue;
         }
         // If NotResource is in policy and resource matches NotResource
         // in policy, move on to next statement
         if (currentStatement.NotResource &&
-            evaluators.isResourceApplicable(requestContext,
+            isResourceApplicable(requestContext,
                 currentStatement.NotResource, log)) {
             continue;
         }
         // If affirmative action is in policy and request action is not
         // applicable, move on to next statement
         if (currentStatement.Action &&
-            !evaluators.isActionApplicable(requestContext.getAction(),
+            !isActionApplicable(requestContext.getAction(),
                 currentStatement.Action, log)) {
             continue;
         }
         // If NotAction is in policy and action matches NotAction in policy,
         // move on to next statement
         if (currentStatement.NotAction &&
-            evaluators.isActionApplicable(requestContext.getAction(),
+            isActionApplicable(requestContext.getAction(),
                 currentStatement.NotAction, log)) {
             continue;
         }
         const conditionEval = currentStatement.Condition ?
-            evaluators.meetConditions(requestContext, currentStatement.Condition, log) :
+            meetConditions(requestContext, currentStatement.Condition, log) :
             null;
         // If do not meet conditions move on to next statement
         if (conditionEval && !conditionEval.allow) {
@@ -276,12 +270,12 @@ evaluators.evaluatePolicy = (requestContext, policy, log) => {
  * @return {string} Allow if permitted, Deny if not permitted.
  * Default is to Deny. Deny overrides an Allow
  */
-evaluators.evaluateAllPolicies = (requestContext, allPolicies, log) => {
+export function evaluateAllPolicies(requestContext, allPolicies, log) {
     log.trace('evaluating all policies');
     let verdict = 'Deny';
     for (let i = 0; i < allPolicies.length; i++) {
         const singlePolicyVerdict =
-            evaluators.evaluatePolicy(requestContext, allPolicies[i], log);
+            evaluatePolicy(requestContext, allPolicies[i], log);
         // If there is any Deny, just return Deny
         if (singlePolicyVerdict === 'Deny') {
             return 'Deny';
@@ -293,5 +287,3 @@ evaluators.evaluateAllPolicies = (requestContext, allPolicies, log) => {
     log.trace('result of evaluating all pollicies', { verdict });
     return verdict;
 };
-
-module.exports = evaluators;
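These hunks drop the CommonJS `evaluators` namespace object: each helper becomes a named `export function`, same-module calls lose the `evaluators.` prefix, and the trailing `module.exports = evaluators;` goes away. A minimal sketch of the same conversion, with placeholder bodies rather than Arsenal's real evaluation logic:

```
// Before (CommonJS namespace object):
//     const evaluators = {};
//     evaluators.isResourceApplicable = (resource) => { /* ... */ };
//     evaluators.evaluatePolicy = (resource) =>
//         evaluators.isResourceApplicable(resource) ? 'Allow' : 'Neutral';
//     module.exports = evaluators;

// After (ES module named exports) — placeholder bodies, not the real logic:
export function isResourceApplicable(resource: unknown): boolean {
    return resource !== undefined;
}

export function evaluatePolicy(resource: unknown): 'Allow' | 'Neutral' {
    // same-module calls no longer go through the `evaluators.` object
    return isResourceApplicable(resource) ? 'Allow' : 'Neutral';
}
```

Callers switch from `const evaluators = require('./evaluator')` to named imports such as `import { evaluatePolicy } from './evaluator'`, which is exactly what the Principal diff below does with `meetConditions`.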

@@ -1,9 +1,9 @@
-const { meetConditions } = require('./evaluator');
+import { meetConditions } from './evaluator';
 
 /**
  * Class with methods to manage the policy 'principal' validation
  */
-class Principal {
+export default class Principal {
     /**
      * Function to evaluate conditions if needed
      *
@@ -176,5 +176,3 @@ class Principal {
         };
     }
 }
-
-module.exports = Principal;
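Here the conversion touches both ends of the module: the destructured `require` becomes a named `import`, and the class that used to be assigned to `module.exports` becomes the module's default export. A sketch of the pairing, where the `checkConditions` wrapper is hypothetical and only stands in for the real class body:

```
// Named import replaces `const { meetConditions } = require('./evaluator');`
import { meetConditions } from './evaluator';

// Default export replaces the trailing `module.exports = Principal;`
export default class Principal {
    // hypothetical helper, only here to exercise the import;
    // the real class exposes its own static evaluation methods
    static checkConditions(requestContext: any, condition: any, log: any) {
        return condition ? meetConditions(requestContext, condition, log) : null;
    }
}
```

On the consumer side, `const Principal = require('./principal')` becomes `import Principal from './principal'`; nothing about how the class is used has to change.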

@@ -1,4 +1,4 @@
-const ipCheck = require('../ipCheck');
+import * as ipCheck from '../ipCheck';
 
 /**
  * getClientIp - Gets the client IP from the request
@@ -6,7 +6,7 @@ const ipCheck = require('../ipCheck');
  * @param {object} s3config - s3 config
  * @return {string} - returns client IP from the request
  */
-function getClientIp(request, s3config) {
+export function getClientIp(request, s3config) {
     const requestConfig = s3config ? s3config.requests : {};
     const remoteAddress = request.socket.remoteAddress;
     const clientIp = requestConfig ? remoteAddress : request.headers['x-forwarded-for'] || remoteAddress;
@@ -26,7 +26,3 @@ function getClientIp(request, s3config) {
     }
     return clientIp;
 }
-
-module.exports = {
-    getClientIp,
-};
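Two idioms show up in this file: a whole-module `require` becomes a namespace import (`import * as ipCheck`), so existing `ipCheck.someCheck(...)` call sites keep working, and the single helper is exported inline instead of through a `module.exports = { getClientIp }` object at the bottom. A simplified, self-contained sketch of the export-shape change (the signature and header handling below are illustrative, not Arsenal's exact code):

```
// Inline named export replaces the trailing `module.exports = { getClientIp };`
export function getClientIp(
    headers: Record<string, string | undefined>,
    remoteAddress: string,
): string {
    // illustrative behaviour: prefer the first proxy-reported address when present
    const forwardedFor = headers['x-forwarded-for'];
    return forwardedFor ? forwardedFor.split(',')[0].trim() : remoteAddress;
}

// Consumers move from `const { getClientIp } = require('./requestUtils');`
// to `import { getClientIp } from './requestUtils';` (path shown for illustration).
```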

@@ -1,4 +1,4 @@
-const sharedActionMap = {
+export const sharedActionMap = {
     bucketDelete: 's3:DeleteBucket',
     // the "s3:PutEncryptionConfiguration" action also governs DELETE
     bucketDeleteEncryption: 's3:PutEncryptionConfiguration',
@@ -47,7 +47,7 @@ const sharedActionMap = {
 };
 
 // action map used for request context
-const actionMapRQ = Object.assign({
+export const actionMapRQ = Object.assign({
     bucketPut: 's3:CreateBucket',
     // for bucketDeleteCors need s3:PutBucketCORS permission
     // see http://docs.aws.amazon.com/AmazonS3/latest/API/
@@ -73,15 +73,15 @@ const actionMapRQ = Object.assign({
 }, sharedActionMap);
 
 // action map used for bucket policies
-const actionMapBP = Object.assign({}, sharedActionMap);
+export const actionMapBP = Object.assign({}, sharedActionMap);
 
 // action map for all relevant s3 actions
-const actionMapS3 = Object.assign({
+export const actionMapS3 = Object.assign({
     bucketGetNotification: 's3:GetBucketNotification',
     bucketPutNotification: 's3:PutBucketNotification',
 }, sharedActionMap, actionMapRQ, actionMapBP);
 
-const actionMonitoringMapS3 = {
+export const actionMonitoringMapS3 = {
     bucketDelete: 'DeleteBucket',
     bucketDeleteCors: 'DeleteBucketCors',
     bucketDeleteEncryption: 'DeleteBucketEncryption',
@@ -139,7 +139,7 @@ const actionMonitoringMapS3 = {
     serviceGet: 'ListBuckets',
 };
 
-const actionMapIAM = {
+export const actionMapIAM = {
     attachGroupPolicy: 'iam:AttachGroupPolicy',
     attachUserPolicy: 'iam:AttachUserPolicy',
     createAccessKey: 'iam:CreateAccessKey',
@@ -178,26 +178,15 @@ const actionMapIAM = {
     getCredentialReport: 'iam:GetCredentialReport',
 };
 
-const actionMapSSO = {
+export const actionMapSSO = {
     SsoAuthorize: 'sso:Authorize',
 };
 
-const actionMapSTS = {
+export const actionMapSTS = {
     assumeRole: 'sts:AssumeRole',
 };
 
-const actionMapMetadata = {
+export const actionMapMetadata = {
     admin: 'metadata:admin',
     default: 'metadata:bucketd',
 };
-
-module.exports = {
-    actionMapRQ,
-    actionMapBP,
-    actionMapS3,
-    actionMonitoringMapS3,
-    actionMapIAM,
-    actionMapSSO,
-    actionMapSTS,
-    actionMapMetadata,
-};
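The action-map file follows the simplest variant of the migration: every `const ... = { ... }` gains an `export` keyword at its declaration, which makes the aggregate `module.exports = { ... }` block at the end redundant. A trimmed-down sketch using two of the smaller maps from the diff (file and import paths are illustrative):

```
// Each map is exported where it is declared, so no export block is needed at the end.
export const actionMapSTS = {
    assumeRole: 'sts:AssumeRole',
};

export const actionMapMetadata = {
    admin: 'metadata:admin',
    default: 'metadata:bucketd',
};

// A consumer pulls in only what it needs:
//     import { actionMapSTS } from './utils/actionMaps';
```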

Some files were not shown because too many files have changed in this diff.