Compare commits

..

10 Commits

Author SHA1 Message Date
Guillaume Hivert f7a9c172cf ARSN-69 Remove ignore of build for NPM
Installing from git sources for dependents produced only an index.js 
file. It was due to .gitignore ignoring the build folder and npm/yarn 
removing the ignored files after install. Adding an empty .npmignore 
solves the problem. This can be found here: 
https://stackoverflow.com/questions/61754026/installing-npm-package-with-prepare-script-from-yarn-produces-only-index-js
2022-02-25 13:26:53 +01:00
Guillaume Hivert a40ad3d604 ARSN-69 Fix prepare 2022-02-25 12:02:35 +01:00
Guillaume Hivert 17d14c918a ARSN-69 Add ts-jest to beautify stacktraces with proper TS files 2022-02-25 11:29:37 +01:00
Guillaume Hivert 9aad788f16 ARSN-69 Ask CI to ignore TS errors while migration in progress 2022-02-24 17:28:38 +01:00
Guillaume Hivert d6cea8e318 ARSN-69 Upload Artifacts 2022-02-24 17:28:17 +01:00
Guillaume Hivert 451b40164c ARSN-69 Add prepare script to provide pure-JS implementation for dependents 2022-02-24 16:57:31 +01:00
Guillaume Hivert b884d38c61 ARSN-69 Make test suite great again 2022-02-24 16:57:15 +01:00
Guillaume Hivert 34c0f22d73 ARSN-69 Add types for dependencies utf8 and level and fix xml conversion 2022-02-24 16:27:11 +01:00
Guillaume Hivert 0359f7d4b4 ARSN-69 Replace remaining commonjs require with ES Modules import/export 2022-02-24 16:27:11 +01:00
Guillaume Hivert 1687221bba ARSN-69 Switch all sources to TypeScript 2022-02-24 16:26:51 +01:00
276 changed files with 3596 additions and 3394 deletions

View File

@ -32,6 +32,7 @@ jobs:
cache: 'yarn' cache: 'yarn'
- name: install dependencies - name: install dependencies
run: yarn cache clean && yarn install --frozen-lockfile run: yarn cache clean && yarn install --frozen-lockfile
continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
- name: lint yaml - name: lint yaml
run: yarn --silent lint_yml run: yarn --silent lint_yml
- name: lint javascript - name: lint javascript
@ -47,3 +48,31 @@ jobs:
- name: run executables tests - name: run executables tests
run: yarn install && yarn test run: yarn install && yarn test
working-directory: 'lib/executables/pensieveCreds/' working-directory: 'lib/executables/pensieveCreds/'
# Compile job: builds the TypeScript sources and uploads the resulting
# artifacts to the Scality artifacts server. Runs only after the "test"
# job has succeeded (see `needs: test`).
compile:
  name: Compile and upload build artifacts
  needs: test
  runs-on: ubuntu-latest
  steps:
    - name: Checkout
      uses: actions/checkout@v2
    - name: Install NodeJS
      uses: actions/setup-node@v2
      with:
        node-version: '16'
        cache: yarn
    - name: Install dependencies
      # Cache is cleaned first so a stale yarn cache cannot mask
      # lockfile problems; --frozen-lockfile forbids lockfile drift.
      run: yarn cache clean && yarn install --frozen-lockfile
      continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
    - name: Compile
      run: yarn build
      continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
    - name: Upload artifacts
      uses: scality/action-artifacts@v2
      with:
        url: https://artifacts.scality.net
        user: ${{ secrets.ARTIFACTS_USER }}
        password: ${{ secrets.ARTIFACTS_PASSWORD }}
        source: ./build
        method: upload
      # NOTE(review): `if: success()` is the default step condition on
      # GitHub Actions, so this line is likely redundant — confirm before
      # removing, since the earlier steps use continue-on-error.
      if: success()

0
.npmignore Normal file
View File

6
babel.config.js Normal file
View File

@ -0,0 +1,6 @@
// Babel configuration.
// - @babel/preset-env targets the Node version running the build
//   ("current"), so no down-compilation beyond what this runtime needs.
// - @babel/preset-typescript strips TypeScript syntax (no type checking).
// NOTE(review): the consumer of this config (e.g. babel-jest/ts-jest) is
// not visible from this file — confirm against package.json.
const presets = [
  ['@babel/preset-env', { targets: { node: 'current' } }],
  '@babel/preset-typescript',
];

module.exports = { presets };

197
index.js
View File

@ -1,197 +0,0 @@
// Aggregated public API of the library (CommonJS entry point).
// Every entry maps a public namespace to its implementation module under
// ./lib; all requires are resolved eagerly at load time.
module.exports = {
    // Top-level utility namespaces.
    auth: require('./lib/auth/auth'),
    constants: require('./lib/constants'),
    db: require('./lib/db'),
    errors: require('./lib/errors.js'),
    errorUtils: require('./lib/errorUtils'),
    shuffle: require('./lib/shuffle'),
    stringHash: require('./lib/stringHash'),
    ipCheck: require('./lib/ipCheck'),
    jsutil: require('./lib/jsutil'),
    // HTTPS material (cipher suite list and DH parameters).
    https: {
        ciphers: require('./lib/https/ciphers.js'),
        dhparam: require('./lib/https/dh2048.js'),
    },
    // Listing algorithms, caches and ordered-collection helpers.
    algorithms: {
        list: require('./lib/algos/list/exportAlgos'),
        listTools: {
            DelimiterTools: require('./lib/algos/list/tools'),
        },
        cache: {
            LRUCache: require('./lib/algos/cache/LRUCache'),
        },
        stream: {
            MergeStream: require('./lib/algos/stream/MergeStream'),
        },
        SortedSet: require('./lib/algos/set/SortedSet'),
    },
    // IAM-style policy evaluation helpers. Note: validateUserPolicy comes
    // from lib/policy (not lib/policyEvaluator) — intentional per the paths.
    policies: {
        evaluators: require('./lib/policyEvaluator/evaluator.js'),
        validateUserPolicy: require('./lib/policy/policyValidator')
            .validateUserPolicy,
        evaluatePrincipal: require('./lib/policyEvaluator/principal'),
        RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
        requestUtils: require('./lib/policyEvaluator/requestUtils'),
        actionMaps: require('./lib/policyEvaluator/utils/actionMaps'),
    },
    Clustering: require('./lib/Clustering'),
    testing: {
        matrix: require('./lib/testing/matrix.js'),
    },
    // Object-versioning primitives; only named members are re-exported.
    versioning: {
        VersioningConstants: require('./lib/versioning/constants.js')
            .VersioningConstants,
        Version: require('./lib/versioning/Version.js').Version,
        VersionID: require('./lib/versioning/VersionID.js'),
    },
    // Network servers/clients: HTTP, RPC, REST, probes and KMIP.
    network: {
        http: {
            server: require('./lib/network/http/server'),
        },
        rpc: require('./lib/network/rpc/rpc'),
        level: require('./lib/network/rpc/level-net'),
        rest: {
            RESTServer: require('./lib/network/rest/RESTServer'),
            RESTClient: require('./lib/network/rest/RESTClient'),
        },
        RoundRobin: require('./lib/network/RoundRobin'),
        probe: {
            ProbeServer: require('./lib/network/probe/ProbeServer'),
            HealthProbeServer:
                require('./lib/network/probe/HealthProbeServer.js'),
            Utils: require('./lib/network/probe/Utils.js'),
        },
        kmip: require('./lib/network/kmip'),
        kmipClient: require('./lib/network/kmip/Client'),
    },
    // S3 route dispatch and shared route utilities.
    s3routes: {
        routes: require('./lib/s3routes/routes'),
        routesUtils: require('./lib/s3routes/routesUtils'),
    },
    // S3 request middleware: metadata, XML, tagging, conditional headers,
    // MPU and Azure-specific helpers.
    s3middleware: {
        userMetadata: require('./lib/s3middleware/userMetadata'),
        convertToXml: require('./lib/s3middleware/convertToXml'),
        escapeForXml: require('./lib/s3middleware/escapeForXml'),
        objectLegalHold: require('./lib/s3middleware/objectLegalHold'),
        tagging: require('./lib/s3middleware/tagging'),
        checkDateModifiedHeaders:
            require('./lib/s3middleware/validateConditionalHeaders')
                .checkDateModifiedHeaders,
        validateConditionalHeaders:
            require('./lib/s3middleware/validateConditionalHeaders')
                .validateConditionalHeaders,
        MD5Sum: require('./lib/s3middleware/MD5Sum'),
        NullStream: require('./lib/s3middleware/nullStream'),
        objectUtils: require('./lib/s3middleware/objectUtils'),
        azureHelper: {
            mpuUtils:
                require('./lib/s3middleware/azureHelpers/mpuUtils'),
            ResultsCollector:
                require('./lib/s3middleware/azureHelpers/ResultsCollector'),
            SubStreamInterface:
                require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
        },
        prepareStream: require('./lib/s3middleware/prepareStream'),
        processMpuParts: require('./lib/s3middleware/processMpuParts'),
        retention: require('./lib/s3middleware/objectRetention'),
        lifecycleHelpers: require('./lib/s3middleware/lifecycleHelpers'),
    },
    // Storage backends, grouped into metadata and data layers, each with
    // bucketclient/file/in-memory/mongo (metadata) or file/external/in-memory
    // (data) implementations.
    storage: {
        metadata: {
            MetadataWrapper: require('./lib/storage/metadata/MetadataWrapper'),
            bucketclient: {
                BucketClientInterface:
                    require('./lib/storage/metadata/bucketclient/' +
                        'BucketClientInterface'),
                LogConsumer:
                    require('./lib/storage/metadata/bucketclient/LogConsumer'),
            },
            file: {
                BucketFileInterface:
                    require('./lib/storage/metadata/file/BucketFileInterface'),
                MetadataFileServer:
                    require('./lib/storage/metadata/file/MetadataFileServer'),
                MetadataFileClient:
                    require('./lib/storage/metadata/file/MetadataFileClient'),
            },
            inMemory: {
                metastore:
                    require('./lib/storage/metadata/in_memory/metastore'),
                metadata: require('./lib/storage/metadata/in_memory/metadata'),
                bucketUtilities:
                    require('./lib/storage/metadata/in_memory/bucket_utilities'),
            },
            mongoclient: {
                MongoClientInterface:
                    require('./lib/storage/metadata/mongoclient/' +
                        'MongoClientInterface'),
                LogConsumer:
                    require('./lib/storage/metadata/mongoclient/LogConsumer'),
            },
            proxy: {
                Server: require('./lib/storage/metadata/proxy/Server'),
            },
        },
        data: {
            DataWrapper: require('./lib/storage/data/DataWrapper'),
            MultipleBackendGateway:
                require('./lib/storage/data/MultipleBackendGateway'),
            parseLC: require('./lib/storage/data/LocationConstraintParser'),
            file: {
                DataFileStore:
                    require('./lib/storage/data/file/DataFileStore'),
                DataFileInterface:
                    require('./lib/storage/data/file/DataFileInterface'),
            },
            external: {
                AwsClient: require('./lib/storage/data/external/AwsClient'),
                AzureClient: require('./lib/storage/data/external/AzureClient'),
                GcpClient: require('./lib/storage/data/external/GcpClient'),
                GCP: require('./lib/storage/data/external/GCP/GcpService'),
                GcpUtils: require('./lib/storage/data/external/GCP/GcpUtils'),
                GcpSigner: require('./lib/storage/data/external/GCP/GcpSigner'),
                PfsClient: require('./lib/storage/data/external/PfsClient'),
                backendUtils: require('./lib/storage/data/external/utils'),
            },
            inMemory: {
                datastore: require('./lib/storage/data/in_memory/datastore'),
            },
        },
        utils: require('./lib/storage/utils'),
    },
    // Data-model classes (bucket/object metadata, configurations, ARN).
    models: {
        BackendInfo: require('./lib/models/BackendInfo'),
        BucketInfo: require('./lib/models/BucketInfo'),
        BucketAzureInfo: require('./lib/models/BucketAzureInfo'),
        ObjectMD: require('./lib/models/ObjectMD'),
        ObjectMDLocation: require('./lib/models/ObjectMDLocation'),
        ObjectMDAzureInfo: require('./lib/models/ObjectMDAzureInfo'),
        ARN: require('./lib/models/ARN'),
        WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
        ReplicationConfiguration:
            require('./lib/models/ReplicationConfiguration'),
        LifecycleConfiguration:
            require('./lib/models/LifecycleConfiguration'),
        LifecycleRule: require('./lib/models/LifecycleRule'),
        BucketPolicy: require('./lib/models/BucketPolicy'),
        ObjectLockConfiguration:
            require('./lib/models/ObjectLockConfiguration'),
        NotificationConfiguration:
            require('./lib/models/NotificationConfiguration'),
    },
    // Metrics clients (stats, Redis, Zenko/Prometheus-style metrics).
    metrics: {
        StatsClient: require('./lib/metrics/StatsClient'),
        StatsModel: require('./lib/metrics/StatsModel'),
        RedisClient: require('./lib/metrics/RedisClient'),
        ZenkoMetrics: require('./lib/metrics/ZenkoMetrics'),
    },
    pensieve: {
        credentialUtils: require('./lib/executables/pensieveCreds/utils'),
    },
    stream: {
        readJSONStreamObject: require('./lib/stream/readJSONStreamObject'),
    },
    patches: {
        locationConstraints: require('./lib/patches/locationConstraints'),
    },
};

76
index.ts Normal file
View File

@ -0,0 +1,76 @@
// Exports
// TypeScript entry point of the library. Most namespaces are re-exported
// as ES modules; `pensieve` and `storage` are still assembled with CJS
// `require` calls — presumably because those subtrees were not yet migrated
// to TypeScript when this file was written (TODO: confirm and convert).
export * as auth from './lib/auth/auth';
export * as constants from './lib/constants';
export * as db from './lib/db';
export { default as errors } from './lib/errors';
export * as errorUtils from './lib/errorUtils';
export { default as shuffle } from './lib/shuffle';
export { default as stringHash } from './lib/stringHash';
export * as ipCheck from './lib/ipCheck';
export * as jsutil from './lib/jsutil';
export * as https from './lib/https';
export { default as Clustering } from './lib/Clustering';
export * as algorithms from './lib/algos';
export * as policies from './lib/policyEvaluator';
export * as testing from './lib/testing';
export * as versioning from './lib/versioning';
export * as network from './lib/network';
export * as s3routes from './lib/s3routes';
export * as s3middleware from './lib/s3middleware';
export * as models from './lib/models';
export * as metrics from './lib/metrics';
export * as stream from './lib/stream';
// Still CJS: pensieve credential helpers.
export const pensieve = {
    credentialUtils: require('./lib/executables/pensieveCreds/utils'),
};
// Still CJS: the storage namespace, split into metadata and data layers,
// mirroring the structure of the old index.js entry point.
export const storage = {
    metadata: {
        MetadataWrapper: require('./lib/storage/metadata/MetadataWrapper'),
        bucketclient: {
            BucketClientInterface: require('./lib/storage/metadata/bucketclient/BucketClientInterface'),
            LogConsumer: require('./lib/storage/metadata/bucketclient/LogConsumer'),
        },
        file: {
            BucketFileInterface: require('./lib/storage/metadata/file/BucketFileInterface'),
            MetadataFileServer: require('./lib/storage/metadata/file/MetadataFileServer'),
            MetadataFileClient: require('./lib/storage/metadata/file/MetadataFileClient'),
        },
        inMemory: {
            metastore: require('./lib/storage/metadata/in_memory/metastore'),
            metadata: require('./lib/storage/metadata/in_memory/metadata'),
            bucketUtilities: require('./lib/storage/metadata/in_memory/bucket_utilities'),
        },
        mongoclient: {
            MongoClientInterface: require('./lib/storage/metadata/mongoclient/MongoClientInterface'),
            LogConsumer: require('./lib/storage/metadata/mongoclient/LogConsumer'),
        },
        proxy: {
            Server: require('./lib/storage/metadata/proxy/Server'),
        },
    },
    data: {
        DataWrapper: require('./lib/storage/data/DataWrapper'),
        MultipleBackendGateway: require('./lib/storage/data/MultipleBackendGateway'),
        parseLC: require('./lib/storage/data/LocationConstraintParser'),
        file: {
            DataFileStore: require('./lib/storage/data/file/DataFileStore'),
            DataFileInterface: require('./lib/storage/data/file/DataFileInterface'),
        },
        external: {
            AwsClient: require('./lib/storage/data/external/AwsClient'),
            AzureClient: require('./lib/storage/data/external/AzureClient'),
            GcpClient: require('./lib/storage/data/external/GcpClient'),
            GCP: require('./lib/storage/data/external/GCP/GcpService'),
            GcpUtils: require('./lib/storage/data/external/GCP/GcpUtils'),
            GcpSigner: require('./lib/storage/data/external/GCP/GcpSigner'),
            PfsClient: require('./lib/storage/data/external/PfsClient'),
            backendUtils: require('./lib/storage/data/external/utils'),
        },
        inMemory: {
            datastore: require('./lib/storage/data/in_memory/datastore'),
        },
    },
    utils: require('./lib/storage/utils'),
};

View File

@ -1,8 +1,6 @@
'use strict'; // eslint-disable-line import * as cluster from 'cluster';
const cluster = require('cluster'); export default class Clustering {
class Clustering {
/** /**
* Constructor * Constructor
* *
@ -259,5 +257,3 @@ class Clustering {
}); });
} }
} }
module.exports = Clustering;

View File

@ -1,4 +1,4 @@
const assert = require('assert'); import assert from 'assert';
/** /**
* @class * @class
@ -6,13 +6,19 @@ const assert = require('assert');
* number of items and a Least Recently Used (LRU) strategy for * number of items and a Least Recently Used (LRU) strategy for
* eviction. * eviction.
*/ */
class LRUCache { export default class LRUCache {
_maxEntries;
_entryMap;
_entryCount;
_lruTail;
_lruHead;
/** /**
* @constructor * @constructor
* @param {number} maxEntries - maximum number of entries kept in * @param maxEntries - maximum number of entries kept in
* the cache * the cache
*/ */
constructor(maxEntries) { constructor(maxEntries: number) {
assert(maxEntries >= 1); assert(maxEntries >= 1);
this._maxEntries = maxEntries; this._maxEntries = maxEntries;
this.clear(); this.clear();
@ -22,12 +28,12 @@ class LRUCache {
* Add or update the value associated to a key in the cache, * Add or update the value associated to a key in the cache,
* making it the most recently accessed for eviction purpose. * making it the most recently accessed for eviction purpose.
* *
* @param {string} key - key to add * @param key - key to add
* @param {object} value - associated value (can be of any type) * @param value - associated value (can be of any type)
* @return {boolean} true if the cache contained an entry with * @return true if the cache contained an entry with
* this key, false if it did not * this key, false if it did not
*/ */
add(key, value) { add(key: string, value): boolean {
let entry = this._entryMap[key]; let entry = this._entryMap[key];
if (entry) { if (entry) {
entry.value = value; entry.value = value;
@ -54,12 +60,12 @@ class LRUCache {
* Get the value associated to a key in the cache, making it the * Get the value associated to a key in the cache, making it the
* most recently accessed for eviction purpose. * most recently accessed for eviction purpose.
* *
* @param {string} key - key of which to fetch the associated value * @param key - key of which to fetch the associated value
* @return {object|undefined} - returns the associated value if * @return returns the associated value if
* exists in the cache, or undefined if not found - either if the * exists in the cache, or undefined if not found - either if the
* key was never added or if it has been evicted from the cache. * key was never added or if it has been evicted from the cache.
*/ */
get(key) { get(key: string) {
const entry = this._entryMap[key]; const entry = this._entryMap[key];
if (entry) { if (entry) {
// make the entry the most recently used by re-pushing it // make the entry the most recently used by re-pushing it
@ -74,12 +80,12 @@ class LRUCache {
/** /**
* Remove an entry from the cache if exists * Remove an entry from the cache if exists
* *
* @param {string} key - key to remove * @param key - key to remove
* @return {boolean} true if an entry has been removed, false if * @return true if an entry has been removed, false if
* there was no entry with this key in the cache - either if the * there was no entry with this key in the cache - either if the
* key was never added or if it has been evicted from the cache. * key was never added or if it has been evicted from the cache.
*/ */
remove(key) { remove(key: string): boolean {
const entry = this._entryMap[key]; const entry = this._entryMap[key];
if (entry) { if (entry) {
this._removeEntry(entry); this._removeEntry(entry);
@ -91,16 +97,14 @@ class LRUCache {
/** /**
* Get the current number of cached entries * Get the current number of cached entries
* *
* @return {number} current number of cached entries * @return current number of cached entries
*/ */
count() { count(): number {
return this._entryCount; return this._entryCount;
} }
/** /**
* Remove all entries from the cache * Remove all entries from the cache
*
* @return {undefined}
*/ */
clear() { clear() {
this._entryMap = {}; this._entryMap = {};
@ -113,8 +117,7 @@ class LRUCache {
* Push an entry to the front of the LRU list, making it the most * Push an entry to the front of the LRU list, making it the most
* recently accessed * recently accessed
* *
* @param {object} entry - entry to push * @param entry - entry to push
* @return {undefined}
*/ */
_lruPushEntry(entry) { _lruPushEntry(entry) {
/* eslint-disable no-param-reassign */ /* eslint-disable no-param-reassign */
@ -133,8 +136,7 @@ class LRUCache {
/** /**
* Remove an entry from the LRU list * Remove an entry from the LRU list
* *
* @param {object} entry - entry to remove * @param entry - entry to remove
* @return {undefined}
*/ */
_lruRemoveEntry(entry) { _lruRemoveEntry(entry) {
/* eslint-disable no-param-reassign */ /* eslint-disable no-param-reassign */
@ -154,8 +156,7 @@ class LRUCache {
/** /**
* Helper function to remove an existing entry from the cache * Helper function to remove an existing entry from the cache
* *
* @param {object} entry - cache entry to remove * @param entry - cache entry to remove
* @return {undefined}
*/ */
_removeEntry(entry) { _removeEntry(entry) {
this._lruRemoveEntry(entry); this._lruRemoveEntry(entry);
@ -163,5 +164,3 @@ class LRUCache {
this._entryCount -= 1; this._entryCount -= 1;
} }
} }
module.exports = LRUCache;

5
lib/algos/index.ts Normal file
View File

@ -0,0 +1,5 @@
// Barrel module for the algorithms namespace: listing algorithms and
// delimiter tools, plus cache / stream / set data structures, re-exported
// under the names the public `algorithms` API exposes.
export * as list from './list/exportAlgos';
export * as DelimiterTools from './list/tools';
export { default as LRUCache } from './cache/LRUCache';
export { default as MergeStream } from './stream/MergeStream';
export { default as SortedSet } from './set/SortedSet';

View File

@ -1,7 +1,4 @@
'use strict'; // eslint-disable-line strict import { FILTER_SKIP, SKIP_NONE } from './tools';
const { FILTER_SKIP, SKIP_NONE } = require('./tools');
// Use a heuristic to amortize the cost of JSON // Use a heuristic to amortize the cost of JSON
// serialization/deserialization only on largest metadata where the // serialization/deserialization only on largest metadata where the
// potential for size reduction is high, considering the bulk of the // potential for size reduction is high, considering the bulk of the
@ -22,7 +19,12 @@ const TRIM_METADATA_MIN_BLOB_SIZE = 10000;
/** /**
* Base class of listing extensions. * Base class of listing extensions.
*/ */
class Extension { export default class Extension {
parameters;
logger;
res?: any[];
keys: number;
/** /**
* This takes a list of parameters and a logger as the inputs. * This takes a list of parameters and a logger as the inputs.
* Derivatives should have their own format regarding parameters. * Derivatives should have their own format regarding parameters.
@ -51,14 +53,14 @@ class Extension {
* heavy unused fields, or left untouched (depending on size * heavy unused fields, or left untouched (depending on size
* heuristics) * heuristics)
*/ */
trimMetadata(value) { trimMetadata(value: string): string {
let ret = undefined; let ret: any = undefined;
if (value.length >= TRIM_METADATA_MIN_BLOB_SIZE) { if (value.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
try { try {
ret = JSON.parse(value); ret = JSON.parse(value);
delete ret.location; delete ret.location;
ret = JSON.stringify(ret); ret = JSON.stringify(ret);
} catch (e) { } catch (e: any) {
// Prefer returning an unfiltered data rather than // Prefer returning an unfiltered data rather than
// stopping the service in case of parsing failure. // stopping the service in case of parsing failure.
// The risk of this approach is a potential // The risk of this approach is a potential
@ -66,7 +68,8 @@ class Extension {
// used by repd. // used by repd.
this.logger.warn( this.logger.warn(
'Could not parse Object Metadata while listing', 'Could not parse Object Metadata while listing',
{ err: e.toString() }); { err: e.toString() }
);
} }
} }
return ret || value; return ret || value;
@ -96,7 +99,7 @@ class Extension {
* = 0: entry is accepted but not included (skipping) * = 0: entry is accepted but not included (skipping)
* < 0: entry is not accepted, listing should finish * < 0: entry is not accepted, listing should finish
*/ */
filter(entry) { filter(entry): number {
return entry ? FILTER_SKIP : FILTER_SKIP; return entry ? FILTER_SKIP : FILTER_SKIP;
} }
@ -105,20 +108,18 @@ class Extension {
* because it is skipping a range of delimited keys or a range of specific * because it is skipping a range of delimited keys or a range of specific
* version when doing master version listing. * version when doing master version listing.
* *
* @return {string} - the insight: a common prefix or a master key, * @return the insight: a common prefix or a master key,
* or SKIP_NONE if there is no insight * or SKIP_NONE if there is no insight
*/ */
skipping() { skipping(): string | undefined {
return SKIP_NONE; return SKIP_NONE;
} }
/** /**
* Get the listing resutls. Format depends on derivatives' specific logic. * Get the listing resutls. Format depends on derivatives' specific logic.
* @return {Array} - The listed elements * @return The listed elements
*/ */
result() { result() {
return this.res; return this.res;
} }
} }
module.exports.default = Extension;

View File

@ -1,9 +1,12 @@
'use strict'; // eslint-disable-line strict import {
inc,
const { inc, checkLimit, listingParamsMasterKeysV0ToV1, checkLimit,
FILTER_END, FILTER_ACCEPT } = require('./tools'); listingParamsMasterKeysV0ToV1,
FILTER_END,
FILTER_ACCEPT,
} from './tools';
const DEFAULT_MAX_KEYS = 1000; const DEFAULT_MAX_KEYS = 1000;
const VSConst = require('../../versioning/constants').VersioningConstants; import { VersioningConstants as VSConst } from '../../versioning/constants';
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst; const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
function numberDefault(num, defaultNum) { function numberDefault(num, defaultNum) {
@ -14,7 +17,22 @@ function numberDefault(num, defaultNum) {
/** /**
* Class for the MultipartUploads extension * Class for the MultipartUploads extension
*/ */
class MultipartUploads { export class MultipartUploads {
params
vFormat
CommonPrefixes
Uploads
IsTruncated
NextKeyMarker
NextUploadIdMarker
prefixLength
queryPrefixLength
keys
maxKeys
delimiter
splitter
logger
/** /**
* Constructor of the extension * Constructor of the extension
* Init and check parameters * Init and check parameters
@ -39,7 +57,9 @@ class MultipartUploads {
this.splitter = params.splitter; this.splitter = params.splitter;
this.logger = logger; this.logger = logger;
Object.assign(this, { Object.assign(
this,
{
[BucketVersioningKeyFormat.v0]: { [BucketVersioningKeyFormat.v0]: {
genMDParams: this.genMDParamsV0, genMDParams: this.genMDParamsV0,
getObjectKey: this.getObjectKeyV0, getObjectKey: this.getObjectKeyV0,
@ -48,13 +68,15 @@ class MultipartUploads {
genMDParams: this.genMDParamsV1, genMDParams: this.genMDParamsV1,
getObjectKey: this.getObjectKeyV1, getObjectKey: this.getObjectKeyV1,
}, },
}[this.vFormat]); }[this.vFormat]
);
} }
genMDParamsV0() { genMDParamsV0() {
const params = {}; const params = {};
if (this.params.keyMarker) { if (this.params.keyMarker) {
params.gt = `overview${this.params.splitter}` + params.gt =
`overview${this.params.splitter}` +
`${this.params.keyMarker}${this.params.splitter}`; `${this.params.keyMarker}${this.params.splitter}`;
if (this.params.uploadIdMarker) { if (this.params.uploadIdMarker) {
params.gt += `${this.params.uploadIdMarker}`; params.gt += `${this.params.uploadIdMarker}`;
@ -147,14 +169,20 @@ class MultipartUploads {
if (this.delimiter) { if (this.delimiter) {
const mpuPrefixSlice = `overview${this.splitter}`.length; const mpuPrefixSlice = `overview${this.splitter}`.length;
const mpuKey = key.slice(mpuPrefixSlice); const mpuKey = key.slice(mpuPrefixSlice);
const commonPrefixIndex = mpuKey.indexOf(this.delimiter, const commonPrefixIndex = mpuKey.indexOf(
this.queryPrefixLength); this.delimiter,
this.queryPrefixLength
);
if (commonPrefixIndex === -1) { if (commonPrefixIndex === -1) {
this.addUpload(value); this.addUpload(value);
} else { } else {
this.addCommonPrefix(mpuKey.substring(0, this.addCommonPrefix(
commonPrefixIndex + this.delimiter.length)); mpuKey.substring(
0,
commonPrefixIndex + this.delimiter.length
)
);
} }
} else { } else {
this.addUpload(value); this.addUpload(value);
@ -182,7 +210,3 @@ class MultipartUploads {
}; };
} }
} }
module.exports = {
MultipartUploads,
};

View File

@ -1,14 +1,17 @@
'use strict'; // eslint-disable-line strict import Extension from './Extension';
import { checkLimit, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } from './tools';
const Extension = require('./Extension').default;
const { checkLimit, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
const DEFAULT_MAX_KEYS = 10000; const DEFAULT_MAX_KEYS = 10000;
/** /**
* Class of an extension doing the simple listing * Class of an extension doing the simple listing
*/ */
class List extends Extension { export class List extends Extension {
maxKeys: number;
filterKey;
filterKeyStartsWith;
res: any[];
/** /**
* Constructor * Constructor
* Set the logger and the res * Set the logger and the res
@ -30,15 +33,17 @@ class List extends Extension {
} }
genMDParams() { genMDParams() {
const params = this.parameters ? { const params = this.parameters
? {
gt: this.parameters.gt, gt: this.parameters.gt,
gte: this.parameters.gte || this.parameters.start, gte: this.parameters.gte || this.parameters.start,
lt: this.parameters.lt, lt: this.parameters.lt,
lte: this.parameters.lte || this.parameters.end, lte: this.parameters.lte || this.parameters.end,
keys: this.parameters.keys, keys: this.parameters.keys,
values: this.parameters.values, values: this.parameters.values,
} : {}; }
Object.keys(params).forEach(key => { : {};
Object.keys(params).forEach((key) => {
if (params[key] === null || params[key] === undefined) { if (params[key] === null || params[key] === undefined) {
delete params[key]; delete params[key];
} }
@ -53,29 +58,30 @@ class List extends Extension {
* *
* @return {Boolean} Returns true if matches, else false. * @return {Boolean} Returns true if matches, else false.
*/ */
customFilter(value) { customFilter(value: string): boolean {
let _value; let _value: any;
try { try {
_value = JSON.parse(value); _value = JSON.parse(value);
} catch (e) { } catch (e: any) {
// Prefer returning an unfiltered data rather than // Prefer returning an unfiltered data rather than
// stopping the service in case of parsing failure. // stopping the service in case of parsing failure.
// The risk of this approach is a potential // The risk of this approach is a potential
// reproduction of MD-692, where too much memory is // reproduction of MD-692, where too much memory is
// used by repd. // used by repd.
this.logger.warn( this.logger.warn('Could not parse Object Metadata while listing', {
'Could not parse Object Metadata while listing', err: e.toString(),
{ err: e.toString() }); });
return false; return false;
} }
if (_value.customAttributes !== undefined) { if (_value.customAttributes !== undefined) {
for (const key of Object.keys(_value.customAttributes)) { for (const key of Object.keys(_value.customAttributes)) {
if (this.filterKey !== undefined && if (this.filterKey !== undefined && key === this.filterKey) {
key === this.filterKey) {
return true; return true;
} }
if (this.filterKeyStartsWith !== undefined && if (
key.startsWith(this.filterKeyStartsWith)) { this.filterKeyStartsWith !== undefined &&
key.startsWith(this.filterKeyStartsWith)
) {
return true; return true;
} }
} }
@ -90,15 +96,17 @@ class List extends Extension {
* @return {number} - > 0 : continue listing * @return {number} - > 0 : continue listing
* < 0 : listing done * < 0 : listing done
*/ */
filter(elem) { filter(elem): number {
// Check first in case of maxkeys <= 0 // Check first in case of maxkeys <= 0
if (this.keys >= this.maxKeys) { if (this.keys >= this.maxKeys) {
return FILTER_END; return FILTER_END;
} }
if ((this.filterKey !== undefined || if (
(this.filterKey !== undefined ||
this.filterKeyStartsWith !== undefined) && this.filterKeyStartsWith !== undefined) &&
typeof elem === 'object' && typeof elem === 'object' &&
!this.customFilter(elem.value)) { !this.customFilter(elem.value)
) {
return FILTER_SKIP; return FILTER_SKIP;
} }
if (typeof elem === 'object') { if (typeof elem === 'object') {
@ -121,7 +129,3 @@ class List extends Extension {
return this.res; return this.res;
} }
} }
module.exports = {
List,
};

View File

@ -1,9 +1,12 @@
'use strict'; // eslint-disable-line strict import Extension from './Extension';
import {
const Extension = require('./Extension').default; inc,
const { inc, listingParamsMasterKeysV0ToV1, listingParamsMasterKeysV0ToV1,
FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools'); FILTER_END,
const VSConst = require('../../versioning/constants').VersioningConstants; FILTER_ACCEPT,
FILTER_SKIP,
} from './tools';
import { VersioningConstants as VSConst } from '../../versioning/constants';
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst; const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
/** /**
@ -14,7 +17,11 @@ const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
* @param {Number} delimiterIndex - 'folder' index in the path * @param {Number} delimiterIndex - 'folder' index in the path
* @return {String} - CommonPrefix * @return {String} - CommonPrefix
*/ */
function getCommonPrefix(key, delimiter, delimiterIndex) { function getCommonPrefix(
key: string,
delimiter: string,
delimiterIndex: number
): string {
return key.substring(0, delimiterIndex + delimiter.length); return key.substring(0, delimiterIndex + delimiter.length);
} }
@ -30,7 +37,25 @@ function getCommonPrefix(key, delimiter, delimiterIndex) {
* @prop {String|undefined} prefix - prefix per amazon format * @prop {String|undefined} prefix - prefix per amazon format
* @prop {Number} maxKeys - number of keys to list * @prop {Number} maxKeys - number of keys to list
*/ */
class Delimiter extends Extension { export class Delimiter extends Extension {
CommonPrefixes: string[];
Contents: string[];
IsTruncated: boolean;
NextMarker?: string;
keys: number;
delimiter?: string;
prefix?: string;
maxKeys: number;
marker;
startAfter;
continuationToken;
alphabeticalOrder;
vFormat;
NextContinuationToken;
startMarker;
continueMarker;
nextContinueMarker;
/** /**
* Create a new Delimiter instance * Create a new Delimiter instance
* @constructor * @constructor
@ -58,6 +83,7 @@ class Delimiter extends Extension {
constructor(parameters, logger, vFormat) { constructor(parameters, logger, vFormat) {
super(parameters, logger); super(parameters, logger);
// original listing parameters // original listing parameters
this.keys = 0;
this.delimiter = parameters.delimiter; this.delimiter = parameters.delimiter;
this.prefix = parameters.prefix; this.prefix = parameters.prefix;
this.marker = parameters.marker; this.marker = parameters.marker;
@ -65,8 +91,9 @@ class Delimiter extends Extension {
this.startAfter = parameters.startAfter; this.startAfter = parameters.startAfter;
this.continuationToken = parameters.continuationToken; this.continuationToken = parameters.continuationToken;
this.alphabeticalOrder = this.alphabeticalOrder =
typeof parameters.alphabeticalOrder !== 'undefined' ? typeof parameters.alphabeticalOrder !== 'undefined'
parameters.alphabeticalOrder : true; ? parameters.alphabeticalOrder
: true;
this.vFormat = vFormat || BucketVersioningKeyFormat.v0; this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
// results // results
@ -79,21 +106,28 @@ class Delimiter extends Extension {
this.startMarker = parameters.v2 ? 'startAfter' : 'marker'; this.startMarker = parameters.v2 ? 'startAfter' : 'marker';
this.continueMarker = parameters.v2 ? 'continuationToken' : 'marker'; this.continueMarker = parameters.v2 ? 'continuationToken' : 'marker';
this.nextContinueMarker = parameters.v2 ? this.nextContinueMarker = parameters.v2
'NextContinuationToken' : 'NextMarker'; ? 'NextContinuationToken'
: 'NextMarker';
if (this.delimiter !== undefined && if (
this.delimiter !== undefined &&
this[this.nextContinueMarker] !== undefined && this[this.nextContinueMarker] !== undefined &&
this[this.nextContinueMarker].startsWith(this.prefix || '')) { this[this.nextContinueMarker].startsWith(this.prefix || '')
const nextDelimiterIndex = ) {
this[this.nextContinueMarker].indexOf(this.delimiter, const nextDelimiterIndex = this[this.nextContinueMarker].indexOf(
this.prefix ? this.prefix.length : 0); this.delimiter,
this[this.nextContinueMarker] = this.prefix ? this.prefix.length : 0
this[this.nextContinueMarker].slice(0, nextDelimiterIndex + );
this.delimiter.length); this[this.nextContinueMarker] = this[this.nextContinueMarker].slice(
0,
nextDelimiterIndex + this.delimiter.length
);
} }
Object.assign(this, { Object.assign(
this,
{
[BucketVersioningKeyFormat.v0]: { [BucketVersioningKeyFormat.v0]: {
genMDParams: this.genMDParamsV0, genMDParams: this.genMDParamsV0,
getObjectKey: this.getObjectKeyV0, getObjectKey: this.getObjectKeyV0,
@ -104,11 +138,12 @@ class Delimiter extends Extension {
getObjectKey: this.getObjectKeyV1, getObjectKey: this.getObjectKeyV1,
skipping: this.skippingV1, skipping: this.skippingV1,
}, },
}[this.vFormat]); }[this.vFormat]
);
} }
genMDParamsV0() { genMDParamsV0() {
const params = {}; const params: { gte?: string; lt?: string; gt?: string } = {};
if (this.prefix) { if (this.prefix) {
params.gte = this.prefix; params.gte = this.prefix;
params.lt = inc(this.prefix); params.lt = inc(this.prefix);
@ -134,7 +169,7 @@ class Delimiter extends Extension {
* final state of the result if it is the case * final state of the result if it is the case
* @return {Boolean} - indicates if the iteration has to stop * @return {Boolean} - indicates if the iteration has to stop
*/ */
_reachedMaxKeys() { _reachedMaxKeys(): boolean {
if (this.keys >= this.maxKeys) { if (this.keys >= this.maxKeys) {
// In cases of maxKeys <= 0 -> IsTruncated = false // In cases of maxKeys <= 0 -> IsTruncated = false
this.IsTruncated = this.maxKeys > 0; this.IsTruncated = this.maxKeys > 0;
@ -151,7 +186,7 @@ class Delimiter extends Extension {
* @param {String} value - The value of the key * @param {String} value - The value of the key
* @return {number} - indicates if iteration should continue * @return {number} - indicates if iteration should continue
*/ */
addContents(key, value) { addContents(key: string, value: string): number {
if (this._reachedMaxKeys()) { if (this._reachedMaxKeys()) {
return FILTER_END; return FILTER_END;
} }
@ -161,11 +196,11 @@ class Delimiter extends Extension {
return FILTER_ACCEPT; return FILTER_ACCEPT;
} }
getObjectKeyV0(obj) { getObjectKeyV0(obj: { key: string }) {
return obj.key; return obj.key;
} }
getObjectKeyV1(obj) { getObjectKeyV1(obj: { key: string }) {
return obj.key.slice(DbPrefixes.Master.length); return obj.key.slice(DbPrefixes.Master.length);
} }
@ -180,13 +215,15 @@ class Delimiter extends Extension {
* @param {String} obj.value - The value of the element * @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue * @return {number} - indicates if iteration should continue
*/ */
filter(obj) { filter(obj: { key: string; value: string }): number {
const key = this.getObjectKey(obj); const key = this.getObjectKey(obj);
const value = obj.value; const value = obj.value;
if ((this.prefix && !key.startsWith(this.prefix)) if (
|| (this.alphabeticalOrder (this.prefix && !key.startsWith(this.prefix)) ||
&& typeof this[this.nextContinueMarker] === 'string' (this.alphabeticalOrder &&
&& key <= this[this.nextContinueMarker])) { typeof this[this.nextContinueMarker] === 'string' &&
key <= this[this.nextContinueMarker])
) {
return FILTER_SKIP; return FILTER_SKIP;
} }
if (this.delimiter) { if (this.delimiter) {
@ -206,10 +243,12 @@ class Delimiter extends Extension {
* @param {Number} index - after prefix starting point * @param {Number} index - after prefix starting point
* @return {Boolean} - indicates if iteration should continue * @return {Boolean} - indicates if iteration should continue
*/ */
addCommonPrefix(key, index) { addCommonPrefix(key: string, index: number): boolean {
const commonPrefix = getCommonPrefix(key, this.delimiter, index); const commonPrefix = getCommonPrefix(key, this.delimiter, index);
if (this.CommonPrefixes.indexOf(commonPrefix) === -1 if (
&& this[this.nextContinueMarker] !== commonPrefix) { this.CommonPrefixes.indexOf(commonPrefix) === -1 &&
this[this.nextContinueMarker] !== commonPrefix
) {
if (this._reachedMaxKeys()) { if (this._reachedMaxKeys()) {
return FILTER_END; return FILTER_END;
} }
@ -228,7 +267,7 @@ class Delimiter extends Extension {
* @return {string} - the present range (NextMarker) if repd believes * @return {string} - the present range (NextMarker) if repd believes
* that it's enough and should move on * that it's enough and should move on
*/ */
skippingV0() { skippingV0(): string {
return this[this.nextContinueMarker]; return this[this.nextContinueMarker];
} }
@ -239,7 +278,7 @@ class Delimiter extends Extension {
* @return {string} - the present range (NextMarker) if repd believes * @return {string} - the present range (NextMarker) if repd believes
* that it's enough and should move on * that it's enough and should move on
*/ */
skippingV1() { skippingV1(): string {
return DbPrefixes.Master + this[this.nextContinueMarker]; return DbPrefixes.Master + this[this.nextContinueMarker];
} }
@ -261,14 +300,17 @@ class Delimiter extends Extension {
Delimiter: this.delimiter, Delimiter: this.delimiter,
}; };
if (this.parameters.v2) { if (this.parameters.v2) {
//
result.NextContinuationToken = this.IsTruncated result.NextContinuationToken = this.IsTruncated
? this.NextContinuationToken : undefined; ? this.NextContinuationToken
: undefined;
} else { } else {
result.NextMarker = (this.IsTruncated && this.delimiter) //
? this.NextMarker : undefined; result.NextMarker =
this.IsTruncated && this.delimiter
? this.NextMarker
: undefined;
} }
return result; return result;
} }
} }
module.exports = { Delimiter };

View File

@ -1,10 +1,8 @@
'use strict'; // eslint-disable-line strict import { Delimiter } from './delimiter';
import { Version } from '../../versioning/Version';
const Delimiter = require('./delimiter').Delimiter; import { VersioningConstants as VSConst } from '../../versioning/constants';
const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { BucketVersioningKeyFormat } = VSConst; const { BucketVersioningKeyFormat } = VSConst;
const { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools'); import { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } from './tools';
const VID_SEP = VSConst.VersionId.Separator; const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes } = VSConst; const { DbPrefixes } = VSConst;
@ -13,7 +11,11 @@ const { DbPrefixes } = VSConst;
* Handle object listing with parameters. This extends the base class Delimiter * Handle object listing with parameters. This extends the base class Delimiter
* to return the raw master versions of existing objects. * to return the raw master versions of existing objects.
*/ */
class DelimiterMaster extends Delimiter { export class DelimiterMaster extends Delimiter {
prvKey;
prvPHDKey;
inReplayPrefix;
/** /**
* Delimiter listing of master versions. * Delimiter listing of master versions.
* @param {Object} parameters - listing parameters * @param {Object} parameters - listing parameters
@ -34,7 +36,9 @@ class DelimiterMaster extends Delimiter {
this.prvPHDKey = undefined; this.prvPHDKey = undefined;
this.inReplayPrefix = false; this.inReplayPrefix = false;
Object.assign(this, { Object.assign(
this,
{
[BucketVersioningKeyFormat.v0]: { [BucketVersioningKeyFormat.v0]: {
filter: this.filterV0, filter: this.filterV0,
skipping: this.skippingV0, skipping: this.skippingV0,
@ -43,7 +47,8 @@ class DelimiterMaster extends Delimiter {
filter: this.filterV1, filter: this.filterV1,
skipping: this.skippingV1, skipping: this.skippingV1,
}, },
}[this.vFormat]); }[this.vFormat]
);
} }
/** /**
@ -58,7 +63,7 @@ class DelimiterMaster extends Delimiter {
* @param {String} obj.value - The value of the element * @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue * @return {number} - indicates if iteration should continue
*/ */
filterV0(obj) { filterV0(obj: { key: string; value: string }): number {
let key = obj.key; let key = obj.key;
const value = obj.value; const value = obj.value;
@ -70,9 +75,11 @@ class DelimiterMaster extends Delimiter {
/* Skip keys not starting with the prefix or not alphabetically /* Skip keys not starting with the prefix or not alphabetically
* ordered. */ * ordered. */
if ((this.prefix && !key.startsWith(this.prefix)) if (
|| (typeof this[this.nextContinueMarker] === 'string' && (this.prefix && !key.startsWith(this.prefix)) ||
key <= this[this.nextContinueMarker])) { (typeof this[this.nextContinueMarker] === 'string' &&
key <= this[this.nextContinueMarker])
) {
return FILTER_SKIP; return FILTER_SKIP;
} }
@ -95,9 +102,12 @@ class DelimiterMaster extends Delimiter {
* NextMarker to the common prefix instead of the whole key * NextMarker to the common prefix instead of the whole key
* value. (TODO: remove this test once ZENKO-1048 is fixed) * value. (TODO: remove this test once ZENKO-1048 is fixed)
* */ * */
if (key === this.prvKey || key === this[this.nextContinueMarker] || if (
key === this.prvKey ||
key === this[this.nextContinueMarker] ||
(this.delimiter && (this.delimiter &&
key.startsWith(this[this.nextContinueMarker]))) { key.startsWith(this[this.nextContinueMarker]))
) {
/* master version already filtered */ /* master version already filtered */
return FILTER_SKIP; return FILTER_SKIP;
} }
@ -155,7 +165,7 @@ class DelimiterMaster extends Delimiter {
* @param {String} obj.value - The value of the element * @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue * @return {number} - indicates if iteration should continue
*/ */
filterV1(obj) { filterV1(obj: { key: string; value: string }): number {
// Filtering master keys in v1 is simply listing the master // Filtering master keys in v1 is simply listing the master
// keys, as the state of version keys do not change the // keys, as the state of version keys do not change the
// result, so we can use Delimiter method directly. // result, so we can use Delimiter method directly.
@ -167,8 +177,9 @@ class DelimiterMaster extends Delimiter {
// next marker or next continuation token: // next marker or next continuation token:
// - foo/ : skipping foo/ // - foo/ : skipping foo/
// - foo : skipping foo. // - foo : skipping foo.
const index = this[this.nextContinueMarker]. const index = this[this.nextContinueMarker].lastIndexOf(
lastIndexOf(this.delimiter); this.delimiter
);
if (index === this[this.nextContinueMarker].length - 1) { if (index === this[this.nextContinueMarker].length - 1) {
return this[this.nextContinueMarker]; return this[this.nextContinueMarker];
} }
@ -192,5 +203,3 @@ class DelimiterMaster extends Delimiter {
return DbPrefixes.Master + skipTo; return DbPrefixes.Master + skipTo;
} }
} }
module.exports = { DelimiterMaster };

View File

@ -1,10 +1,13 @@
'use strict'; // eslint-disable-line strict import { Delimiter } from './delimiter';
import { Version } from '../../versioning/Version';
const Delimiter = require('./delimiter').Delimiter; import { VersioningConstants as VSConst } from '../../versioning/constants';
const Version = require('../../versioning/Version').Version; import {
const VSConst = require('../../versioning/constants').VersioningConstants; inc,
const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = FILTER_END,
require('./tools'); FILTER_ACCEPT,
FILTER_SKIP,
SKIP_NONE,
} from './tools';
const VID_SEP = VSConst.VersionId.Separator; const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst; const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
@ -21,7 +24,16 @@ const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
* @prop {String|undefined} prefix - prefix per amazon format * @prop {String|undefined} prefix - prefix per amazon format
* @prop {Number} maxKeys - number of keys to list * @prop {Number} maxKeys - number of keys to list
*/ */
class DelimiterVersions extends Delimiter { export class DelimiterVersions extends Delimiter {
CommonPrefixes: string[];
Contents: string[];
IsTruncated: boolean;
NextMarker?: string;
keys: number;
delimiter?: string;
prefix?: string;
maxKeys: number;
constructor(parameters, logger, vFormat) { constructor(parameters, logger, vFormat) {
super(parameters, logger, vFormat); super(parameters, logger, vFormat);
// specific to version listing // specific to version listing
@ -35,7 +47,9 @@ class DelimiterVersions extends Delimiter {
this.NextVersionIdMarker = undefined; this.NextVersionIdMarker = undefined;
this.inReplayPrefix = false; this.inReplayPrefix = false;
Object.assign(this, { Object.assign(
this,
{
[BucketVersioningKeyFormat.v0]: { [BucketVersioningKeyFormat.v0]: {
genMDParams: this.genMDParamsV0, genMDParams: this.genMDParamsV0,
filter: this.filterV0, filter: this.filterV0,
@ -46,7 +60,8 @@ class DelimiterVersions extends Delimiter {
filter: this.filterV1, filter: this.filterV1,
skipping: this.skippingV1, skipping: this.skippingV1,
}, },
}[this.vFormat]); }[this.vFormat]
);
} }
genMDParamsV0() { genMDParamsV0() {
@ -63,9 +78,10 @@ class DelimiterVersions extends Delimiter {
if (this.parameters.versionIdMarker) { if (this.parameters.versionIdMarker) {
// versionIdMarker should always come with keyMarker // versionIdMarker should always come with keyMarker
// but may not be the other way around // but may not be the other way around
params.gt = this.parameters.keyMarker params.gt =
+ VID_SEP this.parameters.keyMarker +
+ this.parameters.versionIdMarker; VID_SEP +
this.parameters.versionIdMarker;
} else { } else {
params.gt = inc(this.parameters.keyMarker + VID_SEP); params.gt = inc(this.parameters.keyMarker + VID_SEP);
} }
@ -89,20 +105,27 @@ class DelimiterVersions extends Delimiter {
params[1].lt = inc(DbPrefixes.Version); // stop after the last version key params[1].lt = inc(DbPrefixes.Version); // stop after the last version key
} }
if (this.parameters.keyMarker) { if (this.parameters.keyMarker) {
if (params[1].gte <= DbPrefixes.Version + this.parameters.keyMarker) { if (
params[1].gte <=
DbPrefixes.Version + this.parameters.keyMarker
) {
delete params[0].gte; delete params[0].gte;
delete params[1].gte; delete params[1].gte;
params[0].gt = DbPrefixes.Master + inc(this.parameters.keyMarker + VID_SEP); params[0].gt =
DbPrefixes.Master +
inc(this.parameters.keyMarker + VID_SEP);
if (this.parameters.versionIdMarker) { if (this.parameters.versionIdMarker) {
// versionIdMarker should always come with keyMarker // versionIdMarker should always come with keyMarker
// but may not be the other way around // but may not be the other way around
params[1].gt = DbPrefixes.Version params[1].gt =
+ this.parameters.keyMarker DbPrefixes.Version +
+ VID_SEP this.parameters.keyMarker +
+ this.parameters.versionIdMarker; VID_SEP +
this.parameters.versionIdMarker;
} else { } else {
params[1].gt = DbPrefixes.Version params[1].gt =
+ inc(this.parameters.keyMarker + VID_SEP); DbPrefixes.Version +
inc(this.parameters.keyMarker + VID_SEP);
} }
} }
} }
@ -120,7 +143,7 @@ class DelimiterVersions extends Delimiter {
* * -1 if master key < version key * * -1 if master key < version key
* * 1 if master key > version key * * 1 if master key > version key
*/ */
compareObjects(masterObj, versionObj) { compareObjects(masterObj, versionObj): number {
const masterKey = masterObj.key.slice(DbPrefixes.Master.length); const masterKey = masterObj.key.slice(DbPrefixes.Master.length);
const versionKey = versionObj.key.slice(DbPrefixes.Version.length); const versionKey = versionObj.key.slice(DbPrefixes.Version.length);
return masterKey < versionKey ? -1 : 1; return masterKey < versionKey ? -1 : 1;
@ -136,7 +159,11 @@ class DelimiterVersions extends Delimiter {
* @param {String} obj.value - The value of the key * @param {String} obj.value - The value of the key
* @return {Boolean} - indicates if iteration should continue * @return {Boolean} - indicates if iteration should continue
*/ */
addContents(obj) { addContents(obj: {
key: string;
versionId: string;
value: string;
}): boolean {
if (this._reachedMaxKeys()) { if (this._reachedMaxKeys()) {
return FILTER_END; return FILTER_END;
} }
@ -163,7 +190,7 @@ class DelimiterVersions extends Delimiter {
* @param {String} obj.value - The value of the element * @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue * @return {number} - indicates if iteration should continue
*/ */
filterV0(obj) { filterV0(obj: { key: string; value: string }): number {
if (obj.key.startsWith(DbPrefixes.Replay)) { if (obj.key.startsWith(DbPrefixes.Replay)) {
this.inReplayPrefix = true; this.inReplayPrefix = true;
return FILTER_SKIP; return FILTER_SKIP;
@ -189,12 +216,14 @@ class DelimiterVersions extends Delimiter {
* @param {String} obj.value - The value of the element * @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue * @return {number} - indicates if iteration should continue
*/ */
filterV1(obj) { filterV1(obj: { key: string; value: string }): number {
// this function receives both M and V keys, but their prefix // this function receives both M and V keys, but their prefix
// length is the same so we can remove their prefix without // length is the same so we can remove their prefix without
// looking at the type of key // looking at the type of key
return this.filterCommon(obj.key.slice(DbPrefixes.Master.length), return this.filterCommon(
obj.value); obj.key.slice(DbPrefixes.Master.length),
obj.value
);
} }
filterCommon(key, value) { filterCommon(key, value) {
@ -207,14 +236,16 @@ class DelimiterVersions extends Delimiter {
if (versionIdIndex < 0) { if (versionIdIndex < 0) {
nonversionedKey = key; nonversionedKey = key;
this.masterKey = key; this.masterKey = key;
this.masterVersionId = this.masterVersionId = Version.from(value).getVersionId() || 'null';
Version.from(value).getVersionId() || 'null';
versionId = this.masterVersionId; versionId = this.masterVersionId;
} else { } else {
nonversionedKey = key.slice(0, versionIdIndex); nonversionedKey = key.slice(0, versionIdIndex);
versionId = key.slice(versionIdIndex + 1); versionId = key.slice(versionIdIndex + 1);
// skip a version key if it is the master version // skip a version key if it is the master version
if (this.masterKey === nonversionedKey && this.masterVersionId === versionId) { if (
this.masterKey === nonversionedKey &&
this.masterVersionId === versionId
) {
return FILTER_SKIP; return FILTER_SKIP;
} }
this.masterKey = undefined; this.masterKey = undefined;
@ -222,7 +253,10 @@ class DelimiterVersions extends Delimiter {
} }
if (this.delimiter) { if (this.delimiter) {
const baseIndex = this.prefix ? this.prefix.length : 0; const baseIndex = this.prefix ? this.prefix.length : 0;
const delimiterIndex = nonversionedKey.indexOf(this.delimiter, baseIndex); const delimiterIndex = nonversionedKey.indexOf(
this.delimiter,
baseIndex
);
if (delimiterIndex >= 0) { if (delimiterIndex >= 0) {
return this.addCommonPrefix(nonversionedKey, delimiterIndex); return this.addCommonPrefix(nonversionedKey, delimiterIndex);
} }
@ -249,8 +283,7 @@ class DelimiterVersions extends Delimiter {
return SKIP_NONE; return SKIP_NONE;
} }
// skip to the same object key in both M and V range listings // skip to the same object key in both M and V range listings
return [DbPrefixes.Master + skipV0, return [DbPrefixes.Master + skipV0, DbPrefixes.Version + skipV0];
DbPrefixes.Version + skipV0];
} }
/** /**
@ -269,11 +302,10 @@ class DelimiterVersions extends Delimiter {
Versions: this.Contents, Versions: this.Contents,
IsTruncated: this.IsTruncated, IsTruncated: this.IsTruncated,
NextKeyMarker: this.IsTruncated ? this.NextMarker : undefined, NextKeyMarker: this.IsTruncated ? this.NextMarker : undefined,
NextVersionIdMarker: this.IsTruncated ? NextVersionIdMarker: this.IsTruncated
this.NextVersionIdMarker : undefined, ? this.NextVersionIdMarker
: undefined,
Delimiter: this.delimiter, Delimiter: this.delimiter,
}; };
} }
} }
module.exports = { DelimiterVersions };

View File

@ -1,9 +0,0 @@
module.exports = {
Basic: require('./basic').List,
Delimiter: require('./delimiter').Delimiter,
DelimiterVersions: require('./delimiterVersions')
.DelimiterVersions,
DelimiterMaster: require('./delimiterMaster')
.DelimiterMaster,
MPU: require('./MPU').MultipartUploads,
};

View File

@ -0,0 +1,5 @@
export { List as Basic } from './basic';
export { Delimiter } from './delimiter';
export { DelimiterVersions } from './delimiterVersions';
export { DelimiterMaster } from './delimiterMaster';
export { MultipartUploads as MPU } from './MPU';

View File

@ -1,21 +1,25 @@
const assert = require('assert'); import assert from 'assert';
import { FILTER_END, FILTER_SKIP, SKIP_NONE } from './tools';
const { FILTER_END, FILTER_SKIP, SKIP_NONE } = require('./tools');
const MAX_STREAK_LENGTH = 100; const MAX_STREAK_LENGTH = 100;
/** /**
* Handle the filtering and the skip mechanism of a listing result. * Handle the filtering and the skip mechanism of a listing result.
*/ */
class Skip { export class Skip {
extension;
gteParams;
listingEndCb;
skipRangeCb;
streakLength;
/** /**
* @param {Object} params - skip parameters * @param {Object} params - skip parameters
* @param {Object} params.extension - delimiter extension used (required) * @param {Object} params.extension - delimiter extension used (required)
* @param {String} params.gte - current range gte (greater than or * @param {String} params.gte - current range gte (greater than or
* equal) used by the client code * equal) used by the client code
*/ */
constructor(params) { constructor(params: { extension: any; gte: string }) {
assert(params.extension); assert(params.extension);
this.extension = params.extension; this.extension = params.extension;
@ -47,7 +51,7 @@ class Skip {
* This function calls the listing end or the skip range callbacks if * This function calls the listing end or the skip range callbacks if
* needed. * needed.
*/ */
filter(entry) { filter(entry): void {
assert(this.listingEndCb); assert(this.listingEndCb);
assert(this.skipRangeCb); assert(this.skipRangeCb);
@ -56,8 +60,10 @@ class Skip {
if (filteringResult === FILTER_END) { if (filteringResult === FILTER_END) {
this.listingEndCb(); this.listingEndCb();
} else if (filteringResult === FILTER_SKIP } else if (
&& skippingRange !== SKIP_NONE) { filteringResult === FILTER_SKIP &&
skippingRange !== SKIP_NONE
) {
if (++this.streakLength >= MAX_STREAK_LENGTH) { if (++this.streakLength >= MAX_STREAK_LENGTH) {
const newRange = this._inc(skippingRange); const newRange = this._inc(skippingRange);
@ -73,7 +79,7 @@ class Skip {
} }
} }
_inc(str) { _inc(str: string) {
if (!str) { if (!str) {
return str; return str;
} }
@ -83,6 +89,3 @@ class Skip {
return `${str.slice(0, str.length - 1)}${lastCharNewValue}`; return `${str.slice(0, str.length - 1)}${lastCharNewValue}`;
} }
} }
module.exports = Skip;

View File

@ -1,10 +1,11 @@
const { DbPrefixes } = require('../../versioning/constants').VersioningConstants; import { VersioningConstants as VSConst } from '../../versioning/constants';
const { DbPrefixes } = VSConst
// constants for extensions // constants for extensions
const SKIP_NONE = undefined; // to be inline with the values of NextMarker export const SKIP_NONE = undefined; // to be inline with the values of NextMarker
const FILTER_ACCEPT = 1; export const FILTER_ACCEPT = 1;
const FILTER_SKIP = 0; export const FILTER_SKIP = 0;
const FILTER_END = -1; export const FILTER_END = -1;
/** /**
* This function check if number is valid * This function check if number is valid
@ -15,8 +16,8 @@ const FILTER_END = -1;
* @param {Number} limit - The limit to respect * @param {Number} limit - The limit to respect
* @return {Number} - The parsed number || limit * @return {Number} - The parsed number || limit
*/ */
function checkLimit(number, limit) { export function checkLimit(number: number, limit: number): number {
const parsed = Number.parseInt(number, 10); const parsed = Number.parseInt(number, 10)
const valid = !Number.isNaN(parsed) && (!limit || parsed <= limit); const valid = !Number.isNaN(parsed) && (!limit || parsed <= limit);
return valid ? parsed : limit; return valid ? parsed : limit;
} }
@ -28,7 +29,7 @@ function checkLimit(number, limit) {
* @return {string} - the incremented string * @return {string} - the incremented string
* or the input if it is not valid * or the input if it is not valid
*/ */
function inc(str) { export function inc(str: string): string {
return str ? (str.slice(0, str.length - 1) + return str ? (str.slice(0, str.length - 1) +
String.fromCharCode(str.charCodeAt(str.length - 1) + 1)) : str; String.fromCharCode(str.charCodeAt(str.length - 1) + 1)) : str;
} }
@ -40,7 +41,7 @@ function inc(str) {
* @param {object} v0params - listing parameters for v0 format * @param {object} v0params - listing parameters for v0 format
* @return {object} - listing parameters for v1 format * @return {object} - listing parameters for v1 format
*/ */
function listingParamsMasterKeysV0ToV1(v0params) { export function listingParamsMasterKeysV0ToV1(v0params: any): any {
const v1params = Object.assign({}, v0params); const v1params = Object.assign({}, v0params);
if (v0params.gt !== undefined) { if (v0params.gt !== undefined) {
v1params.gt = `${DbPrefixes.Master}${v0params.gt}`; v1params.gt = `${DbPrefixes.Master}${v0params.gt}`;
@ -58,13 +59,3 @@ function listingParamsMasterKeysV0ToV1(v0params) {
} }
return v1params; return v1params;
} }
module.exports = {
checkLimit,
inc,
listingParamsMasterKeysV0ToV1,
SKIP_NONE,
FILTER_END,
FILTER_SKIP,
FILTER_ACCEPT,
};

View File

@ -1,4 +1,4 @@
function indexOf(arr, value) { export function indexOf<T>(arr: T[], value: T) {
if (!arr.length) { if (!arr.length) {
return -1; return -1;
} }
@ -22,10 +22,10 @@ function indexOf(arr, value) {
return -1; return -1;
} }
function indexAtOrBelow(arr, value) { export function indexAtOrBelow<T>(arr: T[], value: T) {
let i; let i: number;
let lo; let lo: number;
let hi; let hi: number;
if (!arr.length || arr[0] > value) { if (!arr.length || arr[0] > value) {
return -1; return -1;
@ -52,7 +52,7 @@ function indexAtOrBelow(arr, value) {
/* /*
* perform symmetric diff in O(m + n) * perform symmetric diff in O(m + n)
*/ */
function symDiff(k1, k2, v1, v2, cb) { export function symDiff(k1, k2, v1, v2, cb) {
let i = 0; let i = 0;
let j = 0; let j = 0;
const n = k1.length; const n = k1.length;
@ -79,9 +79,3 @@ function symDiff(k1, k2, v1, v2, cb) {
j++; j++;
} }
} }
module.exports = {
indexOf,
indexAtOrBelow,
symDiff,
};

View File

@ -1,13 +1,12 @@
const ArrayUtils = require('./ArrayUtils'); import * as ArrayUtils from './ArrayUtils';
class SortedSet { export default class SortedSet<Key, Value> {
constructor(obj) { keys: Key[];
if (obj) { values: Value[];
this.keys = obj.keys;
this.values = obj.values; constructor(obj?: { keys: Key[]; values: Value[] }) {
} else { this.keys = obj?.keys ?? [];
this.clear(); this.values = obj?.values ?? [];
}
} }
clear() { clear() {
@ -19,7 +18,7 @@ class SortedSet {
return this.keys.length; return this.keys.length;
} }
set(key, value) { set(key: Key, value: Value) {
const index = ArrayUtils.indexAtOrBelow(this.keys, key); const index = ArrayUtils.indexAtOrBelow(this.keys, key);
if (this.keys[index] === key) { if (this.keys[index] === key) {
this.values[index] = value; this.values[index] = value;
@ -29,17 +28,17 @@ class SortedSet {
this.values.splice(index + 1, 0, value); this.values.splice(index + 1, 0, value);
} }
isSet(key) { isSet(key: Key) {
const index = ArrayUtils.indexOf(this.keys, key); const index = ArrayUtils.indexOf(this.keys, key);
return index >= 0; return index >= 0;
} }
get(key) { get(key: Key) {
const index = ArrayUtils.indexOf(this.keys, key); const index = ArrayUtils.indexOf(this.keys, key);
return index >= 0 ? this.values[index] : undefined; return index >= 0 ? this.values[index] : undefined;
} }
del(key) { del(key: Key) {
const index = ArrayUtils.indexOf(this.keys, key); const index = ArrayUtils.indexOf(this.keys, key);
if (index >= 0) { if (index >= 0) {
this.keys.splice(index, 1); this.keys.splice(index, 1);
@ -47,5 +46,3 @@ class SortedSet {
} }
} }
} }
module.exports = SortedSet;

View File

@ -1,7 +1,17 @@
const stream = require('stream'); import stream from 'stream';
class MergeStream extends stream.Readable { export default class MergeStream extends stream.Readable {
constructor(stream1, stream2, compare) { _compare: (a: any, b: any) => number;
_streams: [stream.Readable, stream.Readable];
_peekItems: [undefined | null, undefined | null];
_streamEof: [boolean, boolean];
_streamToResume: stream.Readable | null;
constructor(
stream1: stream.Readable,
stream2: stream.Readable,
compare: (a: any, b: any) => number
) {
super({ objectMode: true }); super({ objectMode: true });
this._compare = compare; this._compare = compare;
@ -16,13 +26,13 @@ class MergeStream extends stream.Readable {
this._streamEof = [false, false]; this._streamEof = [false, false];
this._streamToResume = null; this._streamToResume = null;
stream1.on('data', item => this._onItem(stream1, item, 0, 1)); stream1.on('data', (item) => this._onItem(stream1, item, 0, 1));
stream1.once('end', () => this._onEnd(stream1, 0, 1)); stream1.once('end', () => this._onEnd(stream1, 0, 1));
stream1.once('error', err => this._onError(stream1, err, 0, 1)); stream1.once('error', (err) => this._onError(stream1, err, 0, 1));
stream2.on('data', item => this._onItem(stream2, item, 1, 0)); stream2.on('data', (item) => this._onItem(stream2, item, 1, 0));
stream2.once('end', () => this._onEnd(stream2, 1, 0)); stream2.once('end', () => this._onEnd(stream2, 1, 0));
stream2.once('error', err => this._onError(stream2, err, 1, 0)); stream2.once('error', (err) => this._onError(stream2, err, 1, 0));
} }
_read() { _read() {
@ -41,7 +51,7 @@ class MergeStream extends stream.Readable {
callback(); callback();
} }
_onItem(myStream, myItem, myIndex, otherIndex) { _onItem(myStream: stream.Readable, myItem, myIndex, otherIndex) {
this._peekItems[myIndex] = myItem; this._peekItems[myIndex] = myItem;
const otherItem = this._peekItems[otherIndex]; const otherItem = this._peekItems[otherIndex];
if (otherItem === undefined) { if (otherItem === undefined) {
@ -69,7 +79,7 @@ class MergeStream extends stream.Readable {
return undefined; return undefined;
} }
_onEnd(myStream, myIndex, otherIndex) { _onEnd(myStream: stream.Readable, myIndex, otherIndex) {
this._streamEof[myIndex] = true; this._streamEof[myIndex] = true;
if (this._peekItems[myIndex] === undefined) { if (this._peekItems[myIndex] === undefined) {
this._peekItems[myIndex] = null; this._peekItems[myIndex] = null;
@ -94,7 +104,7 @@ class MergeStream extends stream.Readable {
return otherStream.resume(); return otherStream.resume();
} }
_onError(myStream, err, myIndex, otherIndex) { _onError(myStream: stream.Readable, err, myIndex, otherIndex) {
myStream.destroy(); myStream.destroy();
if (this._streams[otherIndex]) { if (this._streams[otherIndex]) {
this._streams[otherIndex].destroy(); this._streams[otherIndex].destroy();
@ -102,5 +112,3 @@ class MergeStream extends stream.Readable {
this.emit('error', err); this.emit('error', err);
} }
} }
module.exports = MergeStream;

View File

@ -1,6 +1,4 @@
'use strict'; // eslint-disable-line strict import * as constants from '../constants';
const constants = require('../constants');
/** /**
* Class containing requester's information received from Vault * Class containing requester's information received from Vault
@ -9,7 +7,14 @@ const constants = require('../constants');
* @return {AuthInfo} an AuthInfo instance * @return {AuthInfo} an AuthInfo instance
*/ */
class AuthInfo { export default class AuthInfo {
arn
canonicalID
shortid
email
accountDisplayName
IAMdisplayName
constructor(objectFromVault) { constructor(objectFromVault) {
// amazon resource name for IAM user (if applicable) // amazon resource name for IAM user (if applicable)
this.arn = objectFromVault.arn; this.arn = objectFromVault.arn;
@ -50,13 +55,12 @@ class AuthInfo {
return this.canonicalID === constants.publicId; return this.canonicalID === constants.publicId;
} }
isRequesterAServiceAccount() { isRequesterAServiceAccount() {
return this.canonicalID.startsWith( return this.canonicalID.startsWith(`${constants.zenkoServiceAccount}/`);
`${constants.zenkoServiceAccount}/`);
} }
isRequesterThisServiceAccount(serviceName) { isRequesterThisServiceAccount(serviceName) {
return this.canonicalID === return (
`${constants.zenkoServiceAccount}/${serviceName}`; this.canonicalID ===
`${constants.zenkoServiceAccount}/${serviceName}`
);
} }
} }
module.exports = AuthInfo;

View File

@ -1,5 +1,5 @@
const errors = require('../errors'); import errors from '../errors';
const AuthInfo = require('./AuthInfo'); import AuthInfo from './AuthInfo';
/** vaultSignatureCb parses message from Vault and instantiates /** vaultSignatureCb parses message from Vault and instantiates
* @param {object} err - error from vault * @param {object} err - error from vault
@ -39,7 +39,10 @@ function vaultSignatureCb(err, authInfo, log, callback, streamingV4Params) {
* authentication backends. * authentication backends.
* @class Vault * @class Vault
*/ */
class Vault { export default class Vault {
client
implName
/** /**
* @constructor * @constructor
* @param {object} client - authentication backend or vault client * @param {object} client - authentication backend or vault client
@ -312,5 +315,3 @@ class Vault {
}); });
} }
} }
module.exports = Vault;

View File

@ -1,22 +1,20 @@
'use strict'; // eslint-disable-line strict import * as crypto from 'crypto';
import errors from '../errors';
const crypto = require('crypto'); import * as queryString from 'querystring';
const errors = require('../errors'); import AuthInfo from './AuthInfo';
const queryString = require('querystring'); import * as v2 from './v2/authV2';
const AuthInfo = require('./AuthInfo'); import * as v4 from './v4/authV4';
const v2 = require('./v2/authV2'); import * as constants from '../constants';
const v4 = require('./v4/authV4'); import constructStringToSignV2 from './v2/constructStringToSign';
const constants = require('../constants'); import constructStringToSignV4 from './v4/constructStringToSign';
const constructStringToSignV2 = require('./v2/constructStringToSign'); import { convertUTCtoISO8601 } from './v4/timeUtils';
const constructStringToSignV4 = require('./v4/constructStringToSign'); import * as vaultUtilities from './backends/in_memory/vaultUtilities';
const convertUTCtoISO8601 = require('./v4/timeUtils').convertUTCtoISO8601; import * as inMemoryBackend from './backends/in_memory/Backend';
const vaultUtilities = require('./backends/in_memory/vaultUtilities'); import validateAuthConfig from './backends/in_memory/validateAuthConfig';
const inMemoryBackend = require('./backends/in_memory/Backend'); import AuthLoader from './backends/in_memory/AuthLoader';
const validateAuthConfig = require('./backends/in_memory/validateAuthConfig'); import Vault from './Vault';
const AuthLoader = require('./backends/in_memory/AuthLoader'); import baseBackend from './backends/base';
const Vault = require('./Vault'); import chainBackend from './backends/ChainBackend';
const baseBackend = require('./backends/base');
const chainBackend = require('./backends/ChainBackend');
let vault = null; let vault = null;
const auth = {}; const auth = {};
@ -73,8 +71,9 @@ function extractParams(request, log, awsService, data) {
} else if (authHeader.startsWith('AWS4')) { } else if (authHeader.startsWith('AWS4')) {
version = 'v4'; version = 'v4';
} else { } else {
log.trace('invalid authorization security header', log.trace('invalid authorization security header', {
{ header: authHeader }); header: authHeader,
});
return { err: errors.AccessDenied }; return { err: errors.AccessDenied };
} }
} else if (data.Signature) { } else if (data.Signature) {
@ -88,8 +87,10 @@ function extractParams(request, log, awsService, data) {
// Here, either both values are set, or none is set // Here, either both values are set, or none is set
if (version !== null && method !== null) { if (version !== null && method !== null) {
if (!checkFunctions[version] || !checkFunctions[version][method]) { if (!checkFunctions[version] || !checkFunctions[version][method]) {
log.trace('invalid auth version or method', log.trace('invalid auth version or method', {
{ version, authMethod: method }); version,
authMethod: method,
});
return { err: errors.NotImplemented }; return { err: errors.NotImplemented };
} }
log.trace('identified auth method', { version, authMethod: method }); log.trace('identified auth method', { version, authMethod: method });
@ -121,10 +122,11 @@ function doAuth(request, log, cb, awsService, requestContexts) {
return cb(null, res.params); return cb(null, res.params);
} }
if (requestContexts) { if (requestContexts) {
requestContexts.forEach(requestContext => { requestContexts.forEach((requestContext) => {
requestContext.setAuthType(res.params.data.authType); requestContext.setAuthType(res.params.data.authType);
requestContext.setSignatureVersion(res.params requestContext.setSignatureVersion(
.data.signatureVersion); res.params.data.signatureVersion
);
requestContext.setSignatureAge(res.params.data.signatureAge); requestContext.setSignatureAge(res.params.data.signatureAge);
requestContext.setSecurityToken(res.params.data.securityToken); requestContext.setSecurityToken(res.params.data.securityToken);
}); });
@ -136,8 +138,12 @@ function doAuth(request, log, cb, awsService, requestContexts) {
return vault.authenticateV2Request(res.params, requestContexts, cb); return vault.authenticateV2Request(res.params, requestContexts, cb);
} }
if (res.params.version === 4) { if (res.params.version === 4) {
return vault.authenticateV4Request(res.params, requestContexts, cb, return vault.authenticateV4Request(
awsService); res.params,
requestContexts,
cb,
awsService
);
} }
log.error('authentication method not found', { log.error('authentication method not found', {
@ -160,16 +166,22 @@ function doAuth(request, log, cb, awsService, requestContexts) {
* are temporary credentials from STS * are temporary credentials from STS
* @return {undefined} * @return {undefined}
*/ */
function generateV4Headers(request, data, accessKey, secretKeyValue, function generateV4Headers(
awsService, proxyPath, sessionToken) { request,
data,
accessKey,
secretKeyValue,
awsService,
proxyPath,
sessionToken
) {
Object.assign(request, { headers: {} }); Object.assign(request, { headers: {} });
const amzDate = convertUTCtoISO8601(Date.now()); const amzDate = convertUTCtoISO8601(Date.now());
// get date without time // get date without time
const scopeDate = amzDate.slice(0, amzDate.indexOf('T')); const scopeDate = amzDate.slice(0, amzDate.indexOf('T'));
const region = 'us-east-1'; const region = 'us-east-1';
const service = awsService || 'iam'; const service = awsService || 'iam';
const credentialScope = const credentialScope = `${scopeDate}/${region}/${service}/aws4_request`;
`${scopeDate}/${region}/${service}/aws4_request`;
const timestamp = amzDate; const timestamp = amzDate;
const algorithm = 'AWS4-HMAC-SHA256'; const algorithm = 'AWS4-HMAC-SHA256';
@ -179,8 +191,10 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
encodeURIComponent, encodeURIComponent,
}); });
} }
const payloadChecksum = crypto.createHash('sha256') const payloadChecksum = crypto
.update(payload, 'binary').digest('hex'); .createHash('sha256')
.update(payload, 'binary')
.digest('hex');
request.setHeader('host', request._headers.host); request.setHeader('host', request._headers.host);
request.setHeader('x-amz-date', amzDate); request.setHeader('x-amz-date', amzDate);
request.setHeader('x-amz-content-sha256', payloadChecksum); request.setHeader('x-amz-content-sha256', payloadChecksum);
@ -191,47 +205,49 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
Object.assign(request.headers, request._headers); Object.assign(request.headers, request._headers);
const signedHeaders = Object.keys(request._headers) const signedHeaders = Object.keys(request._headers)
.filter(headerName => .filter(
headerName.startsWith('x-amz-') (headerName) =>
|| headerName.startsWith('x-scal-') headerName.startsWith('x-amz-') ||
|| headerName === 'host', headerName.startsWith('x-scal-') ||
).sort().join(';'); headerName === 'host'
const params = { request, signedHeaders, payloadChecksum, )
credentialScope, timestamp, query: data, .sort()
awsService: service, proxyPath }; .join(';');
const params = {
request,
signedHeaders,
payloadChecksum,
credentialScope,
timestamp,
query: data,
awsService: service,
proxyPath,
};
const stringToSign = constructStringToSignV4(params); const stringToSign = constructStringToSignV4(params);
const signingKey = vaultUtilities.calculateSigningKey(secretKeyValue, const signingKey = vaultUtilities.calculateSigningKey(
secretKeyValue,
region, region,
scopeDate, scopeDate,
service); service
const signature = crypto.createHmac('sha256', signingKey) );
.update(stringToSign, 'binary').digest('hex'); const signature = crypto
const authorizationHeader = `${algorithm} Credential=${accessKey}` + .createHmac('sha256', signingKey)
.update(stringToSign, 'binary')
.digest('hex');
const authorizationHeader =
`${algorithm} Credential=${accessKey}` +
`/${credentialScope}, SignedHeaders=${signedHeaders}, ` + `/${credentialScope}, SignedHeaders=${signedHeaders}, ` +
`Signature=${signature}`; `Signature=${signature}`;
request.setHeader('authorization', authorizationHeader); request.setHeader('authorization', authorizationHeader);
Object.assign(request, { headers: {} }); Object.assign(request, { headers: {} });
} }
module.exports = { export const server = { extractParams, doAuth };
setHandler: setAuthHandler, export const client = { generateV4Headers, constructStringToSignV2 };
server: { export const inMemory = {
extractParams,
doAuth,
},
client: {
generateV4Headers,
constructStringToSignV2,
},
inMemory: {
backend: inMemoryBackend, backend: inMemoryBackend,
validateAuthConfig, validateAuthConfig,
AuthLoader, AuthLoader,
},
backends: {
baseBackend,
chainBackend,
},
AuthInfo,
Vault,
}; };
export const backends = { baseBackend, chainBackend };
export { setAuthHandler as setHandler, AuthInfo, Vault };

View File

@ -1,10 +1,8 @@
'use strict'; // eslint-disable-line strict import assert from 'assert';
import async from 'async';
const assert = require('assert'); import errors from '../../errors';
const async = require('async'); import BaseBackend from './base';
const errors = require('../../errors');
const BaseBackend = require('./base');
/** /**
* Class that provides an authentication backend that will verify signatures * Class that provides an authentication backend that will verify signatures
@ -13,13 +11,15 @@ const BaseBackend = require('./base');
* *
* @class ChainBackend * @class ChainBackend
*/ */
class ChainBackend extends BaseBackend { export default class ChainBackend extends BaseBackend {
_clients: any[];
/** /**
* @constructor * @constructor
* @param {string} service - service id * @param {string} service - service id
* @param {object[]} clients - list of authentication backends or vault clients * @param {object[]} clients - list of authentication backends or vault clients
*/ */
constructor(service, clients) { constructor(service: string, clients: any[]) {
super(service); super(service);
assert(Array.isArray(clients) && clients.length > 0, 'invalid client list'); assert(Array.isArray(clients) && clients.length > 0, 'invalid client list');
@ -29,12 +29,11 @@ class ChainBackend extends BaseBackend {
typeof client.getCanonicalIds === 'function' && typeof client.getCanonicalIds === 'function' &&
typeof client.getEmailAddresses === 'function' && typeof client.getEmailAddresses === 'function' &&
typeof client.checkPolicies === 'function' && typeof client.checkPolicies === 'function' &&
typeof client.healthcheck === 'function', typeof client.healthcheck === 'function'
), 'invalid client: missing required auth backend methods'); ), 'invalid client: missing required auth backend methods');
this._clients = clients; this._clients = clients;
} }
/* /*
* try task against each client for one to be successful * try task against each client for one to be successful
*/ */
@ -55,7 +54,7 @@ class ChainBackend extends BaseBackend {
signatureFromRequest, signatureFromRequest,
accessKey, accessKey,
options, options,
done, done
), callback); ), callback);
} }
@ -67,14 +66,15 @@ class ChainBackend extends BaseBackend {
region, region,
scopeDate, scopeDate,
options, options,
done, done
), callback); ), callback);
} }
static _mergeObjects(objectResponses) { static _mergeObjects(objectResponses) {
return objectResponses.reduce( return objectResponses.reduce(
(retObj, resObj) => Object.assign(retObj, resObj.message.body), (retObj, resObj) => Object.assign(retObj, resObj.message.body),
{}); {}
);
} }
getCanonicalIds(emailAddresses, options, callback) { getCanonicalIds(emailAddresses, options, callback) {
@ -90,7 +90,8 @@ class ChainBackend extends BaseBackend {
body: ChainBackend._mergeObjects(res), body: ChainBackend._mergeObjects(res),
}, },
}); });
}); }
);
} }
getEmailAddresses(canonicalIDs, options, callback) { getEmailAddresses(canonicalIDs, options, callback) {
@ -105,7 +106,8 @@ class ChainBackend extends BaseBackend {
body: ChainBackend._mergeObjects(res), body: ChainBackend._mergeObjects(res),
}, },
}); });
}); }
);
} }
/* /*
@ -128,7 +130,7 @@ class ChainBackend extends BaseBackend {
}); });
}); });
return Object.keys(policyMap).map(key => { return Object.keys(policyMap).map((key) => {
const policyRes = { isAllowed: policyMap[key].isAllowed }; const policyRes = { isAllowed: policyMap[key].isAllowed };
if (policyMap[key].arn !== '') { if (policyMap[key].arn !== '') {
policyRes.arn = policyMap[key].arn; policyRes.arn = policyMap[key].arn;
@ -153,7 +155,7 @@ class ChainBackend extends BaseBackend {
requestContextParams, requestContextParams,
userArn, userArn,
options, options,
done, done
), (err, res) => { ), (err, res) => {
if (err) { if (err) {
return callback(err); return callback(err);
@ -171,13 +173,13 @@ class ChainBackend extends BaseBackend {
client.healthcheck(reqUid, (err, res) => done(null, { client.healthcheck(reqUid, (err, res) => done(null, {
error: !!err ? err : null, error: !!err ? err : null,
status: res, status: res,
}), })
), (err, res) => { ), (err, res) => {
if (err) { if (err) {
return callback(err); return callback(err);
} }
const isError = res.some(results => !!results.error); const isError = res.some((results) => !!results.error);
if (isError) { if (isError) {
return callback(errors.InternalError, res); return callback(errors.InternalError, res);
} }
@ -185,5 +187,3 @@ class ChainBackend extends BaseBackend {
}); });
} }
} }
module.exports = ChainBackend;

View File

@ -1,13 +1,13 @@
'use strict'; // eslint-disable-line strict import errors from '../../errors';
const errors = require('../../errors');
/** /**
* Base backend class * Base backend class
* *
* @class BaseBackend * @class BaseBackend
*/ */
class BaseBackend { export default class BaseBackend {
service
/** /**
* @constructor * @constructor
* @param {string} service - service identifer for construction arn * @param {string} service - service identifer for construction arn
@ -82,5 +82,3 @@ class BaseBackend {
return callback(null, { code: 200, message: 'OK' }); return callback(null, { code: 200, message: 'OK' });
} }
} }
module.exports = BaseBackend;

View File

@ -1,9 +1,9 @@
const fs = require('fs'); import * as fs from 'fs';
const glob = require('simple-glob'); import glob from 'simple-glob';
const joi = require('@hapi/joi'); import joi from '@hapi/joi';
const werelogs = require('werelogs'); import werelogs from 'werelogs';
const ARN = require('../../../models/ARN'); import ARN from '../../../models/ARN';
/** /**
* Load authentication information from files or pre-loaded account * Load authentication information from files or pre-loaded account
@ -11,27 +11,38 @@ const ARN = require('../../../models/ARN');
* *
* @class AuthLoader * @class AuthLoader
*/ */
class AuthLoader { export default class AuthLoader {
_log
_authData
_isValid
_joiKeysValidator
_joiValidator
constructor(logApi) { constructor(logApi) {
this._log = new (logApi || werelogs).Logger('S3'); this._log = new (logApi || werelogs).Logger('S3');
this._authData = { accounts: [] }; this._authData = { accounts: [] };
// null: unknown validity, true/false: valid or invalid // null: unknown validity, true/false: valid or invalid
this._isValid = null; this._isValid = null;
this._joiKeysValidator = joi.array() this._joiKeysValidator = joi
.array()
.items({ .items({
access: joi.string().required(), access: joi.string().required(),
secret: joi.string().required(), secret: joi.string().required(),
}) })
.required(); .required();
const accountsJoi = joi.array() const accountsJoi = joi
.array()
.items({ .items({
name: joi.string().required(), name: joi.string().required(),
email: joi.string().email().required(), email: joi.string().email().required(),
arn: joi.string().required(), arn: joi.string().required(),
canonicalID: joi.string().required(), canonicalID: joi.string().required(),
shortid: joi.string().regex(/^[0-9]{12}$/).required(), shortid: joi
.string()
.regex(/^[0-9]{12}$/)
.required(),
keys: this._joiKeysValidator, keys: this._joiKeysValidator,
// backward-compat // backward-compat
users: joi.array(), users: joi.array(),
@ -64,11 +75,12 @@ class AuthLoader {
* logging purpose * logging purpose
* @return {undefined} * @return {undefined}
*/ */
addAccounts(authData, filePath) { addAccounts(authData, filePath: string) {
const isValid = this._validateData(authData, filePath); const isValid = this._validateData(authData, filePath);
if (isValid) { if (isValid) {
this._authData.accounts = this._authData.accounts = this._authData.accounts.concat(
this._authData.accounts.concat(authData.accounts); authData.accounts
);
// defer validity checking when getting data to avoid // defer validity checking when getting data to avoid
// logging multiple times the errors (we need to validate // logging multiple times the errors (we need to validate
// all accounts at once to detect duplicate values) // all accounts at once to detect duplicate values)
@ -87,7 +99,7 @@ class AuthLoader {
* authentication info (see {@link addAccounts()} for format) * authentication info (see {@link addAccounts()} for format)
* @return {undefined} * @return {undefined}
*/ */
addFile(filePath) { addFile(filePath: string) {
const authData = JSON.parse(fs.readFileSync(filePath)); const authData = JSON.parse(fs.readFileSync(filePath));
this.addAccounts(authData, filePath); this.addAccounts(authData, filePath);
} }
@ -103,9 +115,9 @@ class AuthLoader {
* {@link addAccounts()} for JSON format. * {@link addAccounts()} for JSON format.
* @return {undefined} * @return {undefined}
*/ */
addFilesByGlob(globPattern) { addFilesByGlob(globPattern: string | string[]) {
const files = glob(globPattern); const files = glob(globPattern);
files.forEach(filePath => this.addFile(filePath)); files.forEach((filePath) => this.addFile(filePath));
} }
/** /**
@ -134,9 +146,10 @@ class AuthLoader {
return this.validate() ? this._authData : null; return this.validate() ? this._authData : null;
} }
_validateData(authData, filePath) { _validateData(authData, filePath?: string) {
const res = joi.validate(authData, this._joiValidator, const res = joi.validate(authData, this._joiValidator, {
{ abortEarly: false }); abortEarly: false,
});
if (res.error) { if (res.error) {
this._dumpJoiErrors(res.error.details, filePath); this._dumpJoiErrors(res.error.details, filePath);
return false; return false;
@ -144,19 +157,23 @@ class AuthLoader {
let allKeys = []; let allKeys = [];
let arnError = false; let arnError = false;
const validatedAuth = res.value; const validatedAuth = res.value;
validatedAuth.accounts.forEach(account => { validatedAuth.accounts.forEach((account) => {
// backward-compat: ignore arn if starts with 'aws:' and log a // backward-compat: ignore arn if starts with 'aws:' and log a
// warning // warning
if (account.arn.startsWith('aws:')) { if (account.arn.startsWith('aws:')) {
this._log.error( this._log.error(
'account must have a valid AWS ARN, legacy examples ' + 'account must have a valid AWS ARN, legacy examples ' +
'starting with \'aws:\' are not supported anymore. ' + "starting with 'aws:' are not supported anymore. " +
'Please convert to a proper account entry (see ' + 'Please convert to a proper account entry (see ' +
'examples at https://github.com/scality/S3/blob/' + 'examples at https://github.com/scality/S3/blob/' +
'master/conf/authdata.json). Also note that support ' + 'master/conf/authdata.json). Also note that support ' +
'for account users has been dropped.', 'for account users has been dropped.',
{ accountName: account.name, accountArn: account.arn, {
filePath }); accountName: account.name,
accountArn: account.arn,
filePath,
}
);
arnError = true; arnError = true;
return; return;
} }
@ -166,27 +183,33 @@ class AuthLoader {
'turning users into account entries (see examples at ' + 'turning users into account entries (see examples at ' +
'https://github.com/scality/S3/blob/master/conf/' + 'https://github.com/scality/S3/blob/master/conf/' +
'authdata.json)', 'authdata.json)',
{ accountName: account.name, accountArn: account.arn, {
filePath }); accountName: account.name,
accountArn: account.arn,
filePath,
}
);
arnError = true; arnError = true;
return; return;
} }
const arnObj = ARN.createFromString(account.arn); const arnObj = ARN.createFromString(account.arn);
if (arnObj.error) { if (arnObj.error) {
this._log.error( this._log.error('authentication config validation error', {
'authentication config validation error', reason: arnObj.error.description,
{ reason: arnObj.error.description, accountName: account.name,
accountName: account.name, accountArn: account.arn, accountArn: account.arn,
filePath }); filePath,
});
arnError = true; arnError = true;
return; return;
} }
if (!arnObj.isIAMAccount()) { if (!arnObj.isIAMAccount()) {
this._log.error( this._log.error('authentication config validation error', {
'authentication config validation error', reason: 'not an IAM account ARN',
{ reason: 'not an IAM account ARN', accountName: account.name,
accountName: account.name, accountArn: account.arn, accountArn: account.arn,
filePath }); filePath,
});
arnError = true; arnError = true;
return; return;
} }
@ -196,7 +219,9 @@ class AuthLoader {
return false; return false;
} }
const uniqueKeysRes = joi.validate( const uniqueKeysRes = joi.validate(
allKeys, this._joiKeysValidator.unique('access')); allKeys,
this._joiKeysValidator.unique('access')
);
if (uniqueKeysRes.error) { if (uniqueKeysRes.error) {
this._dumpJoiErrors(uniqueKeysRes.error.details, filePath); this._dumpJoiErrors(uniqueKeysRes.error.details, filePath);
return false; return false;
@ -205,7 +230,7 @@ class AuthLoader {
} }
_dumpJoiErrors(errors, filePath) { _dumpJoiErrors(errors, filePath) {
errors.forEach(err => { errors.forEach((err) => {
const logInfo = { item: err.path, filePath }; const logInfo = { item: err.path, filePath };
if (err.type === 'array.unique') { if (err.type === 'array.unique') {
logInfo.reason = `duplicate value '${err.context.path}'`; logInfo.reason = `duplicate value '${err.context.path}'`;
@ -214,10 +239,7 @@ class AuthLoader {
logInfo.reason = err.message; logInfo.reason = err.message;
logInfo.context = err.context; logInfo.context = err.context;
} }
this._log.error('authentication config validation error', this._log.error('authentication config validation error', logInfo);
logInfo);
}); });
} }
} }
module.exports = AuthLoader;

View File

@ -1,12 +1,8 @@
'use strict'; // eslint-disable-line strict import * as crypto from 'crypto';
import errors from '../../../errors';
const crypto = require('crypto'); import { calculateSigningKey, hashSignature } from './vaultUtilities';
import Indexer from './Indexer';
const errors = require('../../../errors'); import BaseBackend from '../base';
const calculateSigningKey = require('./vaultUtilities').calculateSigningKey;
const hashSignature = require('./vaultUtilities').hashSignature;
const Indexer = require('./Indexer');
const BaseBackend = require('../base');
function _formatResponse(userInfoToSend) { function _formatResponse(userInfoToSend) {
return { return {
@ -23,6 +19,9 @@ function _formatResponse(userInfoToSend) {
* @class InMemoryBackend * @class InMemoryBackend
*/ */
class InMemoryBackend extends BaseBackend { class InMemoryBackend extends BaseBackend {
indexer
formatResponse
/** /**
* @constructor * @constructor
* @param {string} service - service identifer for construction arn * @param {string} service - service identifer for construction arn
@ -36,15 +35,23 @@ class InMemoryBackend extends BaseBackend {
this.formatResponse = formatter; this.formatResponse = formatter;
} }
verifySignatureV2(stringToSign, signatureFromRequest, verifySignatureV2(
accessKey, options, callback) { stringToSign,
signatureFromRequest,
accessKey,
options,
callback
) {
const entity = this.indexer.getEntityByKey(accessKey); const entity = this.indexer.getEntityByKey(accessKey);
if (!entity) { if (!entity) {
return callback(errors.InvalidAccessKeyId); return callback(errors.InvalidAccessKeyId);
} }
const secretKey = this.indexer.getSecretKey(entity, accessKey); const secretKey = this.indexer.getSecretKey(entity, accessKey);
const reconstructedSig = const reconstructedSig = hashSignature(
hashSignature(stringToSign, secretKey, options.algo); stringToSign,
secretKey,
options.algo
);
if (signatureFromRequest !== reconstructedSig) { if (signatureFromRequest !== reconstructedSig) {
return callback(errors.SignatureDoesNotMatch); return callback(errors.SignatureDoesNotMatch);
} }
@ -58,16 +65,25 @@ class InMemoryBackend extends BaseBackend {
return callback(null, vaultReturnObject); return callback(null, vaultReturnObject);
} }
verifySignatureV4(stringToSign, signatureFromRequest, accessKey, verifySignatureV4(
region, scopeDate, options, callback) { stringToSign,
signatureFromRequest,
accessKey,
region,
scopeDate,
options,
callback
) {
const entity = this.indexer.getEntityByKey(accessKey); const entity = this.indexer.getEntityByKey(accessKey);
if (!entity) { if (!entity) {
return callback(errors.InvalidAccessKeyId); return callback(errors.InvalidAccessKeyId);
} }
const secretKey = this.indexer.getSecretKey(entity, accessKey); const secretKey = this.indexer.getSecretKey(entity, accessKey);
const signingKey = calculateSigningKey(secretKey, region, scopeDate); const signingKey = calculateSigningKey(secretKey, region, scopeDate);
const reconstructedSig = crypto.createHmac('sha256', signingKey) const reconstructedSig = crypto
.update(stringToSign, 'binary').digest('hex'); .createHmac('sha256', signingKey)
.update(stringToSign, 'binary')
.digest('hex');
if (signatureFromRequest !== reconstructedSig) { if (signatureFromRequest !== reconstructedSig) {
return callback(errors.SignatureDoesNotMatch); return callback(errors.SignatureDoesNotMatch);
} }
@ -83,14 +99,13 @@ class InMemoryBackend extends BaseBackend {
getCanonicalIds(emails, log, cb) { getCanonicalIds(emails, log, cb) {
const results = {}; const results = {};
emails.forEach(email => { emails.forEach((email) => {
const lowercasedEmail = email.toLowerCase(); const lowercasedEmail = email.toLowerCase();
const entity = this.indexer.getEntityByEmail(lowercasedEmail); const entity = this.indexer.getEntityByEmail(lowercasedEmail);
if (!entity) { if (!entity) {
results[email] = 'NotFound'; results[email] = 'NotFound';
} else { } else {
results[email] = results[email] = entity.canonicalID;
entity.canonicalID;
} }
}); });
const vaultReturnObject = { const vaultReturnObject = {
@ -103,7 +118,7 @@ class InMemoryBackend extends BaseBackend {
getEmailAddresses(canonicalIDs, options, cb) { getEmailAddresses(canonicalIDs, options, cb) {
const results = {}; const results = {};
canonicalIDs.forEach(canonicalId => { canonicalIDs.forEach((canonicalId) => {
const foundEntity = this.indexer.getEntityByCanId(canonicalId); const foundEntity = this.indexer.getEntityByCanId(canonicalId);
if (!foundEntity || !foundEntity.email) { if (!foundEntity || !foundEntity.email) {
results[canonicalId] = 'NotFound'; results[canonicalId] = 'NotFound';
@ -131,7 +146,7 @@ class InMemoryBackend extends BaseBackend {
*/ */
getAccountIds(canonicalIDs, options, cb) { getAccountIds(canonicalIDs, options, cb) {
const results = {}; const results = {};
canonicalIDs.forEach(canonicalID => { canonicalIDs.forEach((canonicalID) => {
const foundEntity = this.indexer.getEntityByCanId(canonicalID); const foundEntity = this.indexer.getEntityByCanId(canonicalID);
if (!foundEntity || !foundEntity.shortid) { if (!foundEntity || !foundEntity.shortid) {
results[canonicalID] = 'Not Found'; results[canonicalID] = 'Not Found';
@ -148,7 +163,6 @@ class InMemoryBackend extends BaseBackend {
} }
} }
class S3AuthBackend extends InMemoryBackend { class S3AuthBackend extends InMemoryBackend {
/** /**
* @constructor * @constructor

View File

@ -6,7 +6,7 @@
* *
* @class Indexer * @class Indexer
*/ */
class Indexer { export default class Indexer {
/** /**
* @constructor * @constructor
* @param {object} authdata - the authentication config file's data * @param {object} authdata - the authentication config file's data
@ -141,5 +141,3 @@ class Indexer {
return entity.accountDisplayName; return entity.accountDisplayName;
} }
} }
module.exports = Indexer;

View File

@ -1,4 +1,4 @@
const AuthLoader = require('./AuthLoader'); import AuthLoader from './AuthLoader';
/** /**
* @deprecated please use {@link AuthLoader} class instead * @deprecated please use {@link AuthLoader} class instead
@ -9,10 +9,8 @@ const AuthLoader = require('./AuthLoader');
* @return {boolean} true on erroneous data * @return {boolean} true on erroneous data
* false on success * false on success
*/ */
function validateAuthConfig(authdata, logApi) { export default function validateAuthConfig(authdata, logApi) {
const authLoader = new AuthLoader(logApi); const authLoader = new AuthLoader(logApi);
authLoader.addAccounts(authdata); authLoader.addAccounts(authdata);
return !authLoader.validate(); return !authLoader.validate();
} }
module.exports = validateAuthConfig;

View File

@ -1,6 +1,4 @@
'use strict'; // eslint-disable-line strict import * as crypto from 'crypto';
const crypto = require('crypto');
/** hashSignature for v2 Auth /** hashSignature for v2 Auth
* @param {string} stringToSign - built string to sign per AWS rules * @param {string} stringToSign - built string to sign per AWS rules
@ -8,7 +6,11 @@ const crypto = require('crypto');
* @param {string} algorithm - either SHA256 or SHA1 * @param {string} algorithm - either SHA256 or SHA1
* @return {string} reconstructed signature * @return {string} reconstructed signature
*/ */
function hashSignature(stringToSign, secretKey, algorithm) { export function hashSignature(
stringToSign: string,
secretKey: string,
algorithm: 'SHA256' | 'SHA1'
): string {
const hmacObject = crypto.createHmac(algorithm, secretKey); const hmacObject = crypto.createHmac(algorithm, secretKey);
return hmacObject.update(stringToSign, 'binary').digest('base64'); return hmacObject.update(stringToSign, 'binary').digest('base64');
} }
@ -20,7 +22,12 @@ function hashSignature(stringToSign, secretKey, algorithm) {
* @param {string} [service] - To specify another service than s3 * @param {string} [service] - To specify another service than s3
* @return {string} signingKey - signingKey to calculate signature * @return {string} signingKey - signingKey to calculate signature
*/ */
function calculateSigningKey(secretKey, region, scopeDate, service) { export function calculateSigningKey(
secretKey: string,
region: string,
scopeDate: string,
service: string
): string {
const dateKey = crypto.createHmac('sha256', `AWS4${secretKey}`) const dateKey = crypto.createHmac('sha256', `AWS4${secretKey}`)
.update(scopeDate, 'binary').digest(); .update(scopeDate, 'binary').digest();
const dateRegionKey = crypto.createHmac('sha256', dateKey) const dateRegionKey = crypto.createHmac('sha256', dateKey)
@ -31,5 +38,3 @@ function calculateSigningKey(secretKey, region, scopeDate, service) {
.update('aws4_request', 'binary').digest(); .update('aws4_request', 'binary').digest();
return signingKey; return signingKey;
} }
module.exports = { hashSignature, calculateSigningKey };

View File

@ -1,6 +1,4 @@
'use strict'; // eslint-disable-line strict export default function algoCheck(signatureLength) {
function algoCheck(signatureLength) {
let algo; let algo;
// If the signature sent is 44 characters, // If the signature sent is 44 characters,
// this means that sha256 was used: // this means that sha256 was used:
@ -15,5 +13,3 @@ function algoCheck(signatureLength) {
} }
return algo; return algo;
} }
module.exports = algoCheck;

View File

@ -1,11 +0,0 @@
'use strict'; // eslint-disable-line strict
const headerAuthCheck = require('./headerAuthCheck');
const queryAuthCheck = require('./queryAuthCheck');
const authV2 = {
header: headerAuthCheck,
query: queryAuthCheck,
};
module.exports = authV2;

2
lib/auth/v2/authV2.ts Normal file
View File

@ -0,0 +1,2 @@
export * as header from './headerAuthCheck';
export * as query from './queryAuthCheck';

View File

@ -1,9 +1,8 @@
'use strict'; // eslint-disable-line strict import errors from '../../errors';
const errors = require('../../errors');
const epochTime = new Date('1970-01-01').getTime(); const epochTime = new Date('1970-01-01').getTime();
function checkRequestExpiry(timestamp, log) { export default function checkRequestExpiry(timestamp, log) {
// If timestamp is before epochTime, the request is invalid and return // If timestamp is before epochTime, the request is invalid and return
// errors.AccessDenied // errors.AccessDenied
if (timestamp < epochTime) { if (timestamp < epochTime) {
@ -17,7 +16,7 @@ function checkRequestExpiry(timestamp, log) {
log.trace('request timestamp', { requestTimestamp: timestamp }); log.trace('request timestamp', { requestTimestamp: timestamp });
log.trace('current timestamp', { currentTimestamp: currentTime }); log.trace('current timestamp', { currentTimestamp: currentTime });
const fifteenMinutes = (15 * 60 * 1000); const fifteenMinutes = 15 * 60 * 1000;
if (currentTime - timestamp > fifteenMinutes) { if (currentTime - timestamp > fifteenMinutes) {
log.trace('request timestamp is not within 15 minutes of current time'); log.trace('request timestamp is not within 15 minutes of current time');
log.debug('request time too skewed', { timestamp }); log.debug('request time too skewed', { timestamp });
@ -32,5 +31,3 @@ function checkRequestExpiry(timestamp, log) {
return undefined; return undefined;
} }
module.exports = checkRequestExpiry;

View File

@ -1,11 +1,8 @@
'use strict'; // eslint-disable-line strict import utf8 from 'utf8';
import getCanonicalizedAmzHeaders from './getCanonicalizedAmzHeaders';
import getCanonicalizedResource from './getCanonicalizedResource';
const utf8 = require('utf8'); export default function constructStringToSign(request, data, log, clientType?: any) {
const getCanonicalizedAmzHeaders = require('./getCanonicalizedAmzHeaders');
const getCanonicalizedResource = require('./getCanonicalizedResource');
function constructStringToSign(request, data, log, clientType) {
/* /*
Build signature per AWS requirements: Build signature per AWS requirements:
StringToSign = HTTP-Verb + '\n' + StringToSign = HTTP-Verb + '\n' +
@ -23,11 +20,11 @@ function constructStringToSign(request, data, log, clientType) {
const contentMD5 = headers['content-md5'] ? const contentMD5 = headers['content-md5'] ?
headers['content-md5'] : query['Content-MD5']; headers['content-md5'] : query['Content-MD5'];
stringToSign += (contentMD5 ? `${contentMD5}\n` : '\n'); stringToSign += contentMD5 ? `${contentMD5}\n` : '\n';
const contentType = headers['content-type'] ? const contentType = headers['content-type'] ?
headers['content-type'] : query['Content-Type']; headers['content-type'] : query['Content-Type'];
stringToSign += (contentType ? `${contentType}\n` : '\n'); stringToSign += contentType ? `${contentType}\n` : '\n';
/* /*
AWS docs are conflicting on whether to include x-amz-date header here AWS docs are conflicting on whether to include x-amz-date header here
@ -42,5 +39,3 @@ function constructStringToSign(request, data, log, clientType) {
+ getCanonicalizedResource(request, clientType); + getCanonicalizedResource(request, clientType);
return utf8.encode(stringToSign); return utf8.encode(stringToSign);
} }
module.exports = constructStringToSign;

View File

@ -1,6 +1,4 @@
'use strict'; // eslint-disable-line strict export default function getCanonicalizedAmzHeaders(headers, clientType) {
function getCanonicalizedAmzHeaders(headers, clientType) {
/* /*
Iterate through headers and pull any headers that are x-amz headers. Iterate through headers and pull any headers that are x-amz headers.
Need to include 'x-amz-date' here even though AWS docs Need to include 'x-amz-date' here even though AWS docs
@ -43,5 +41,3 @@ function getCanonicalizedAmzHeaders(headers, clientType) {
`${headerStr}${current[0]}:${current[1]}\n`, `${headerStr}${current[0]}:${current[1]}\n`,
''); '');
} }
module.exports = getCanonicalizedAmzHeaders;

View File

@ -1,6 +1,4 @@
'use strict'; // eslint-disable-line strict import * as url from 'url';
const url = require('url');
const gcpSubresources = [ const gcpSubresources = [
'acl', 'acl',
@ -41,7 +39,7 @@ const awsSubresources = [
'website', 'website',
]; ];
function getCanonicalizedResource(request, clientType) { export default function getCanonicalizedResource(request, clientType) {
/* /*
This variable is used to determine whether to insert This variable is used to determine whether to insert
a '?' or '&'. Once a query parameter is added to the resourceString, a '?' or '&'. Once a query parameter is added to the resourceString,
@ -117,5 +115,3 @@ function getCanonicalizedResource(request, clientType) {
} }
return resourceString; return resourceString;
} }
module.exports = getCanonicalizedResource;

View File

@ -1,12 +1,10 @@
'use strict'; // eslint-disable-line strict import errors from '../../errors';
import * as constants from '../../constants';
import constructStringToSign from './constructStringToSign';
import checkRequestExpiry from './checkRequestExpiry';
import algoCheck from './algoCheck';
const errors = require('../../errors'); export function check(request, log, data) {
const constants = require('../../constants');
const constructStringToSign = require('./constructStringToSign');
const checkRequestExpiry = require('./checkRequestExpiry');
const algoCheck = require('./algoCheck');
function check(request, log, data) {
log.trace('running header auth check'); log.trace('running header auth check');
const headers = request.headers; const headers = request.headers;
@ -17,15 +15,19 @@ function check(request, log, data) {
} }
// Check to make sure timestamp is within 15 minutes of current time // Check to make sure timestamp is within 15 minutes of current time
let timestamp = headers['x-amz-date'] ? let timestamp = headers['x-amz-date']
headers['x-amz-date'] : headers.date; ? headers['x-amz-date']
: headers.date;
timestamp = Date.parse(timestamp); timestamp = Date.parse(timestamp);
if (!timestamp) { if (!timestamp) {
log.debug('missing or invalid date header', log.debug('missing or invalid date header', {
{ method: 'auth/v2/headerAuthCheck.check' }); method: 'auth/v2/headerAuthCheck.check',
return { err: errors.AccessDenied. });
customizeDescription('Authentication requires a valid Date or ' + return {
'x-amz-date header') }; err: errors.AccessDenied.customizeDescription(
'Authentication requires a valid Date or ' + 'x-amz-date header'
),
};
} }
const err = checkRequestExpiry(timestamp, log); const err = checkRequestExpiry(timestamp, log);
@ -46,8 +48,10 @@ function check(request, log, data) {
log.debug('invalid authorization header', { authInfo }); log.debug('invalid authorization header', { authInfo });
return { err: errors.InvalidArgument }; return { err: errors.InvalidArgument };
} }
const accessKey = semicolonIndex > 4 ? const accessKey =
authInfo.substring(4, semicolonIndex).trim() : undefined; semicolonIndex > 4
? authInfo.substring(4, semicolonIndex).trim()
: undefined;
if (typeof accessKey !== 'string' || accessKey.length === 0) { if (typeof accessKey !== 'string' || accessKey.length === 0) {
log.trace('invalid authorization header', { authInfo }); log.trace('invalid authorization header', { authInfo });
return { err: errors.MissingSecurityHeader }; return { err: errors.MissingSecurityHeader };
@ -80,5 +84,3 @@ function check(request, log, data) {
}, },
}; };
} }
module.exports = { check };

View File

@ -1,11 +1,9 @@
'use strict'; // eslint-disable-line strict import errors from '../../errors';
import * as constants from '../../constants';
import algoCheck from './algoCheck';
import constructStringToSign from './constructStringToSign';
const errors = require('../../errors'); export function check(request, log, data) {
const constants = require('../../constants');
const algoCheck = require('./algoCheck');
const constructStringToSign = require('./constructStringToSign');
function check(request, log, data) {
log.trace('running query auth check'); log.trace('running query auth check');
if (request.method === 'POST') { if (request.method === 'POST') {
log.debug('query string auth not supported for post requests'); log.debug('query string auth not supported for post requests');
@ -28,26 +26,28 @@ function check(request, log, data) {
*/ */
const expirationTime = parseInt(data.Expires, 10) * 1000; const expirationTime = parseInt(data.Expires, 10) * 1000;
if (Number.isNaN(expirationTime)) { if (Number.isNaN(expirationTime)) {
log.debug('invalid expires parameter', log.debug('invalid expires parameter', { expires: data.Expires });
{ expires: data.Expires });
return { err: errors.MissingSecurityHeader }; return { err: errors.MissingSecurityHeader };
} }
const currentTime = Date.now(); const currentTime = Date.now();
const preSignedURLExpiry = process.env.PRE_SIGN_URL_EXPIRY const preSignedURLExpiry =
&& !Number.isNaN(process.env.PRE_SIGN_URL_EXPIRY) process.env.PRE_SIGN_URL_EXPIRY &&
!Number.isNaN(process.env.PRE_SIGN_URL_EXPIRY)
? Number.parseInt(process.env.PRE_SIGN_URL_EXPIRY, 10) ? Number.parseInt(process.env.PRE_SIGN_URL_EXPIRY, 10)
: constants.defaultPreSignedURLExpiry * 1000; : constants.defaultPreSignedURLExpiry * 1000;
if (expirationTime > currentTime + preSignedURLExpiry) { if (expirationTime > currentTime + preSignedURLExpiry) {
log.debug('expires parameter too far in future', log.debug('expires parameter too far in future', {
{ expires: request.query.Expires }); expires: request.query.Expires,
});
return { err: errors.AccessDenied }; return { err: errors.AccessDenied };
} }
if (currentTime > expirationTime) { if (currentTime > expirationTime) {
log.debug('current time exceeds expires time', log.debug('current time exceeds expires time', {
{ expires: request.query.Expires }); expires: request.query.Expires,
});
return { err: errors.RequestTimeTooSkewed }; return { err: errors.RequestTimeTooSkewed };
} }
const accessKey = data.AWSAccessKeyId; const accessKey = data.AWSAccessKeyId;
@ -82,5 +82,3 @@ function check(request, log, data) {
}, },
}; };
} }
module.exports = { check };

View File

@ -1,11 +0,0 @@
'use strict'; // eslint-disable-line strict
const headerAuthCheck = require('./headerAuthCheck');
const queryAuthCheck = require('./queryAuthCheck');
const authV4 = {
header: headerAuthCheck,
query: queryAuthCheck,
};
module.exports = authV4;

2
lib/auth/v4/authV4.ts Normal file
View File

@ -0,0 +1,2 @@
export * as header from './headerAuthCheck';
export * as query from './queryAuthCheck';

View File

@ -1,5 +1,3 @@
'use strict'; // eslint-disable-line strict
/* /*
AWS's URI encoding rules: AWS's URI encoding rules:
URI encode every byte. Uri-Encode() must enforce the following rules: URI encode every byte. Uri-Encode() must enforce the following rules:
@ -32,7 +30,7 @@ function _toHexUTF8(char) {
return res; return res;
} }
function awsURIencode(input, encodeSlash, noEncodeStar) { export default function awsURIencode(input, encodeSlash?: any, noEncodeStar?: any) {
const encSlash = encodeSlash === undefined ? true : encodeSlash; const encSlash = encodeSlash === undefined ? true : encodeSlash;
let encoded = ''; let encoded = '';
/** /**
@ -44,11 +42,15 @@ function awsURIencode(input, encodeSlash, noEncodeStar) {
} }
for (let i = 0; i < input.length; i++) { for (let i = 0; i < input.length; i++) {
let ch = input.charAt(i); let ch = input.charAt(i);
if ((ch >= 'A' && ch <= 'Z') || if (
(ch >= 'A' && ch <= 'Z') ||
(ch >= 'a' && ch <= 'z') || (ch >= 'a' && ch <= 'z') ||
(ch >= '0' && ch <= '9') || (ch >= '0' && ch <= '9') ||
ch === '_' || ch === '-' || ch === '_' ||
ch === '~' || ch === '.') { ch === '-' ||
ch === '~' ||
ch === '.'
) {
encoded = encoded.concat(ch); encoded = encoded.concat(ch);
} else if (ch === ' ') { } else if (ch === ' ') {
encoded = encoded.concat('%20'); encoded = encoded.concat('%20');
@ -76,5 +78,3 @@ function awsURIencode(input, encodeSlash, noEncodeStar) {
} }
return encoded; return encoded;
} }
module.exports = awsURIencode;

View File

@ -1,17 +1,22 @@
'use strict'; // eslint-disable-line strict import * as crypto from 'crypto';
import createCanonicalRequest from './createCanonicalRequest';
const crypto = require('crypto');
const createCanonicalRequest = require('./createCanonicalRequest');
/** /**
* constructStringToSign - creates V4 stringToSign * constructStringToSign - creates V4 stringToSign
* @param {object} params - params object * @param {object} params - params object
* @returns {string} - stringToSign * @returns {string} - stringToSign
*/ */
function constructStringToSign(params) { export default function constructStringToSign(params): string {
const { request, signedHeaders, payloadChecksum, credentialScope, timestamp, const {
query, log, proxyPath } = params; request,
signedHeaders,
payloadChecksum,
credentialScope,
timestamp,
query,
log,
proxyPath,
} = params;
const path = proxyPath || request.path; const path = proxyPath || request.path;
const canonicalReqResult = createCanonicalRequest({ const canonicalReqResult = createCanonicalRequest({
@ -34,11 +39,11 @@ function constructStringToSign(params) {
log.debug('constructed canonicalRequest', { canonicalReqResult }); log.debug('constructed canonicalRequest', { canonicalReqResult });
} }
const sha256 = crypto.createHash('sha256'); const sha256 = crypto.createHash('sha256');
const canonicalHex = sha256.update(canonicalReqResult, 'binary') const canonicalHex = sha256
.update(canonicalReqResult, 'binary')
.digest('hex'); .digest('hex');
const stringToSign = `AWS4-HMAC-SHA256\n${timestamp}\n` + const stringToSign =
`AWS4-HMAC-SHA256\n${timestamp}\n` +
`${credentialScope}\n${canonicalHex}`; `${credentialScope}\n${canonicalHex}`;
return stringToSign; return stringToSign;
} }
module.exports = constructStringToSign;

View File

@ -1,8 +1,6 @@
'use strict'; // eslint-disable-line strict import awsURIencode from './awsURIencode';
import * as crypto from 'crypto';
const awsURIencode = require('./awsURIencode'); import * as queryString from 'querystring';
const crypto = require('crypto');
const queryString = require('querystring');
/** /**
* createCanonicalRequest - creates V4 canonical request * createCanonicalRequest - creates V4 canonical request
@ -12,7 +10,7 @@ const queryString = require('querystring');
* payloadChecksum (from request) * payloadChecksum (from request)
* @returns {string} - canonicalRequest * @returns {string} - canonicalRequest
*/ */
function createCanonicalRequest(params) { export default function createCanonicalRequest(params) {
const pHttpVerb = params.pHttpVerb; const pHttpVerb = params.pHttpVerb;
const pResource = params.pResource; const pResource = params.pResource;
const pQuery = params.pQuery; const pQuery = params.pQuery;
@ -87,5 +85,3 @@ function createCanonicalRequest(params) {
`${signedHeaders}\n${payloadChecksum}`; `${signedHeaders}\n${payloadChecksum}`;
return canonicalRequest; return canonicalRequest;
} }
module.exports = createCanonicalRequest;

View File

@ -1,16 +1,16 @@
'use strict'; // eslint-disable-line strict import errors from '../../../lib/errors';
import * as constants from '../../constants';
const errors = require('../../../lib/errors'); import constructStringToSign from './constructStringToSign';
const constants = require('../../constants'); import {
checkTimeSkew,
const constructStringToSign = require('./constructStringToSign'); convertUTCtoISO8601,
const checkTimeSkew = require('./timeUtils').checkTimeSkew; convertAmzTimeToMs,
const convertUTCtoISO8601 = require('./timeUtils').convertUTCtoISO8601; } from './timeUtils';
const convertAmzTimeToMs = require('./timeUtils').convertAmzTimeToMs; import {
const extractAuthItems = require('./validateInputs').extractAuthItems; extractAuthItems,
const validateCredentials = require('./validateInputs').validateCredentials; validateCredentials,
const areSignedHeadersComplete = areSignedHeadersComplete,
require('./validateInputs').areSignedHeadersComplete; } from './validateInputs';
/** /**
* V4 header auth check * V4 header auth check
@ -21,7 +21,7 @@ const areSignedHeadersComplete =
* @param {string} awsService - Aws service ('iam' or 's3') * @param {string} awsService - Aws service ('iam' or 's3')
* @return {callback} calls callback * @return {callback} calls callback
*/ */
function check(request, log, data, awsService) { export function check(request, log, data, awsService) {
log.trace('running header auth check'); log.trace('running header auth check');
const token = request.headers['x-amz-security-token']; const token = request.headers['x-amz-security-token'];
@ -51,8 +51,9 @@ function check(request, log, data, awsService) {
if (payloadChecksum === 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD') { if (payloadChecksum === 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD') {
log.trace('requesting streaming v4 auth'); log.trace('requesting streaming v4 auth');
if (request.method !== 'PUT') { if (request.method !== 'PUT') {
log.debug('streaming v4 auth for put only', log.debug('streaming v4 auth for put only', {
{ method: 'auth/v4/headerAuthCheck.check' }); method: 'auth/v4/headerAuthCheck.check',
});
return { err: errors.InvalidArgument }; return { err: errors.InvalidArgument };
} }
if (!request.headers['x-amz-decoded-content-length']) { if (!request.headers['x-amz-decoded-content-length']) {
@ -77,9 +78,12 @@ function check(request, log, data, awsService) {
if (xAmzDate) { if (xAmzDate) {
const xAmzDateArr = xAmzDate.split('T'); const xAmzDateArr = xAmzDate.split('T');
// check that x-amz- date has the correct format and after epochTime // check that x-amz- date has the correct format and after epochTime
if (xAmzDateArr.length === 2 && xAmzDateArr[0].length === 8 if (
&& xAmzDateArr[1].length === 7 xAmzDateArr.length === 2 &&
&& Number.parseInt(xAmzDateArr[0], 10) > 19700101) { xAmzDateArr[0].length === 8 &&
xAmzDateArr[1].length === 7 &&
Number.parseInt(xAmzDateArr[0], 10) > 19700101
) {
// format of x-amz- date is ISO 8601: YYYYMMDDTHHMMSSZ // format of x-amz- date is ISO 8601: YYYYMMDDTHHMMSSZ
timestamp = request.headers['x-amz-date']; timestamp = request.headers['x-amz-date'];
} }
@ -87,18 +91,27 @@ function check(request, log, data, awsService) {
timestamp = convertUTCtoISO8601(request.headers.date); timestamp = convertUTCtoISO8601(request.headers.date);
} }
if (!timestamp) { if (!timestamp) {
log.debug('missing or invalid date header', log.debug('missing or invalid date header', {
{ method: 'auth/v4/headerAuthCheck.check' }); method: 'auth/v4/headerAuthCheck.check',
return { err: errors.AccessDenied. });
customizeDescription('Authentication requires a valid Date or ' + return {
'x-amz-date header') }; err: errors.AccessDenied.customizeDescription(
'Authentication requires a valid Date or ' + 'x-amz-date header'
),
};
} }
const validationResult = validateCredentials(credentialsArr, timestamp, const validationResult = validateCredentials(
log); credentialsArr,
timestamp,
log
);
if (validationResult instanceof Error) { if (validationResult instanceof Error) {
log.debug('credentials in improper format', { credentialsArr, log.debug('credentials in improper format', {
timestamp, validationResult }); credentialsArr,
timestamp,
validationResult,
});
return { err: validationResult }; return { err: validationResult };
} }
// credentialsArr is [accessKey, date, region, aws-service, aws4_request] // credentialsArr is [accessKey, date, region, aws-service, aws4_request]
@ -121,7 +134,7 @@ function check(request, log, data, awsService) {
// expiry is as set out in the policy. // expiry is as set out in the policy.
// 15 minutes in seconds // 15 minutes in seconds
const expiry = (15 * 60); const expiry = 15 * 60;
const isTimeSkewed = checkTimeSkew(timestamp, expiry, log); const isTimeSkewed = checkTimeSkew(timestamp, expiry, log);
if (isTimeSkewed) { if (isTimeSkewed) {
return { err: errors.RequestTimeTooSkewed }; return { err: errors.RequestTimeTooSkewed };
@ -133,8 +146,11 @@ function check(request, log, data, awsService) {
proxyPath = decodeURIComponent(request.headers.proxy_path); proxyPath = decodeURIComponent(request.headers.proxy_path);
} catch (err) { } catch (err) {
log.debug('invalid proxy_path header', { proxyPath, err }); log.debug('invalid proxy_path header', { proxyPath, err });
return { err: errors.InvalidArgument.customizeDescription( return {
'invalid proxy_path header') }; err: errors.InvalidArgument.customizeDescription(
'invalid proxy_path header'
),
};
} }
} }
@ -154,7 +170,6 @@ function check(request, log, data, awsService) {
return { err: stringToSign }; return { err: stringToSign };
} }
return { return {
err: null, err: null,
params: { params: {
@ -178,5 +193,3 @@ function check(request, log, data, awsService) {
}, },
}; };
} }
module.exports = { check };

View File

@ -1,15 +1,10 @@
'use strict'; // eslint-disable-line strict import * as constants from '../../constants';
import errors from '../../errors';
const constants = require('../../constants'); import constructStringToSign from './constructStringToSign';
const errors = require('../../errors'); import { checkTimeSkew, convertAmzTimeToMs } from './timeUtils';
import { validateCredentials, extractQueryParams } from './validateInputs';
const constructStringToSign = require('./constructStringToSign'); import { areSignedHeadersComplete } from './validateInputs';
const checkTimeSkew = require('./timeUtils').checkTimeSkew;
const convertAmzTimeToMs = require('./timeUtils').convertAmzTimeToMs;
const validateCredentials = require('./validateInputs').validateCredentials;
const extractQueryParams = require('./validateInputs').extractQueryParams;
const areSignedHeadersComplete =
require('./validateInputs').areSignedHeadersComplete;
/** /**
* V4 query auth check * V4 query auth check
@ -18,7 +13,7 @@ const areSignedHeadersComplete =
* @param {object} data - Contain authentification params (GET or POST data) * @param {object} data - Contain authentification params (GET or POST data)
* @return {callback} calls callback * @return {callback} calls callback
*/ */
function check(request, log, data) { export function check(request, log, data) {
const authParams = extractQueryParams(data, log); const authParams = extractQueryParams(data, log);
if (Object.keys(authParams).length !== 5) { if (Object.keys(authParams).length !== 5) {
@ -44,11 +39,13 @@ function check(request, log, data) {
return { err: errors.AccessDenied }; return { err: errors.AccessDenied };
} }
const validationResult = validateCredentials(credential, timestamp, const validationResult = validateCredentials(credential, timestamp, log);
log);
if (validationResult instanceof Error) { if (validationResult instanceof Error) {
log.debug('credentials in improper format', { credential, log.debug('credentials in improper format', {
timestamp, validationResult }); credential,
timestamp,
validationResult,
});
return { err: validationResult }; return { err: validationResult };
} }
const accessKey = credential[0]; const accessKey = credential[0];
@ -68,8 +65,11 @@ function check(request, log, data) {
proxyPath = decodeURIComponent(request.headers.proxy_path); proxyPath = decodeURIComponent(request.headers.proxy_path);
} catch (err) { } catch (err) {
log.debug('invalid proxy_path header', { proxyPath }); log.debug('invalid proxy_path header', { proxyPath });
return { err: errors.InvalidArgument.customizeDescription( return {
'invalid proxy_path header') }; err: errors.InvalidArgument.customizeDescription(
'invalid proxy_path header'
),
};
} }
} }
@ -95,8 +95,7 @@ function check(request, log, data) {
signedHeaders, signedHeaders,
payloadChecksum, payloadChecksum,
timestamp, timestamp,
credentialScope: credentialScope: `${scopeDate}/${region}/${service}/${requestType}`,
`${scopeDate}/${region}/${service}/${requestType}`,
awsService: service, awsService: service,
proxyPath, proxyPath,
}); });
@ -122,5 +121,3 @@ function check(request, log, data) {
}, },
}; };
} }
module.exports = { check };

View File

@ -1,15 +1,31 @@
const { Transform } = require('stream'); import { Transform } from 'stream';
import async from 'async';
const async = require('async'); import errors from '../../../errors';
const errors = require('../../../errors'); import constructChunkStringToSign from './constructChunkStringToSign';
const constructChunkStringToSign = require('./constructChunkStringToSign');
/** /**
* This class is designed to handle the chunks sent in a streaming * This class is designed to handle the chunks sent in a streaming
* v4 Auth request * v4 Auth request
*/ */
class V4Transform extends Transform { export default class V4Transform extends Transform {
log;
cb;
accessKey;
region;
scopeDate;
timestamp;
credentialScope;
lastSignature;
currentSignature;
haveMetadata;
seekingDataSize;
currentData;
dataCursor;
currentMetadata;
lastPieceDone;
lastChunk;
vault;
/** /**
* @constructor * @constructor
* @param {object} streamingV4Params - info for chunk authentication * @param {object} streamingV4Params - info for chunk authentication
@ -28,8 +44,14 @@ class V4Transform extends Transform {
* @param {function} cb - callback to api * @param {function} cb - callback to api
*/ */
constructor(streamingV4Params, vault, log, cb) { constructor(streamingV4Params, vault, log, cb) {
const { accessKey, signatureFromRequest, region, scopeDate, timestamp, const {
credentialScope } = streamingV4Params; accessKey,
signatureFromRequest,
region,
scopeDate,
timestamp,
credentialScope,
} = streamingV4Params;
super({}); super({});
this.log = log; this.log = log;
this.cb = cb; this.cb = cb;
@ -79,28 +101,30 @@ class V4Transform extends Transform {
this.currentMetadata.push(remainingPlusStoredMetadata); this.currentMetadata.push(remainingPlusStoredMetadata);
return { completeMetadata: false }; return { completeMetadata: false };
} }
let fullMetadata = remainingPlusStoredMetadata.slice(0, let fullMetadata = remainingPlusStoredMetadata.slice(0, lineBreakIndex);
lineBreakIndex);
// handle extra line break on end of data chunk // handle extra line break on end of data chunk
if (fullMetadata.length === 0) { if (fullMetadata.length === 0) {
const chunkWithoutLeadingLineBreak = remainingPlusStoredMetadata const chunkWithoutLeadingLineBreak =
.slice(2); remainingPlusStoredMetadata.slice(2);
// find second line break // find second line break
lineBreakIndex = chunkWithoutLeadingLineBreak.indexOf('\r\n'); lineBreakIndex = chunkWithoutLeadingLineBreak.indexOf('\r\n');
if (lineBreakIndex < 0) { if (lineBreakIndex < 0) {
this.currentMetadata.push(chunkWithoutLeadingLineBreak); this.currentMetadata.push(chunkWithoutLeadingLineBreak);
return { completeMetadata: false }; return { completeMetadata: false };
} }
fullMetadata = chunkWithoutLeadingLineBreak.slice(0, fullMetadata = chunkWithoutLeadingLineBreak.slice(
lineBreakIndex); 0,
lineBreakIndex
);
} }
const splitMeta = fullMetadata.toString().split(';'); const splitMeta = fullMetadata.toString().split(';');
this.log.trace('parsed full metadata for chunk', { splitMeta }); this.log.trace('parsed full metadata for chunk', { splitMeta });
if (splitMeta.length !== 2) { if (splitMeta.length !== 2) {
this.log.trace('chunk body did not contain correct ' + this.log.trace(
'metadata format'); 'chunk body did not contain correct ' + 'metadata format'
);
return { err: errors.InvalidArgument }; return { err: errors.InvalidArgument };
} }
let dataSize = splitMeta[0]; let dataSize = splitMeta[0];
@ -132,8 +156,9 @@ class V4Transform extends Transform {
completeMetadata: true, completeMetadata: true,
// start slice at lineBreak plus 2 to remove line break at end of // start slice at lineBreak plus 2 to remove line break at end of
// metadata piece since length of '\r\n' is 2 // metadata piece since length of '\r\n' is 2
unparsedChunk: remainingPlusStoredMetadata unparsedChunk: remainingPlusStoredMetadata.slice(
.slice(lineBreakIndex + 2), lineBreakIndex + 2
),
}; };
} }
@ -146,10 +171,13 @@ class V4Transform extends Transform {
*/ */
_authenticate(dataToSend, done) { _authenticate(dataToSend, done) {
// use prior sig to construct new string to sign // use prior sig to construct new string to sign
const stringToSign = constructChunkStringToSign(this.timestamp, const stringToSign = constructChunkStringToSign(
this.credentialScope, this.lastSignature, dataToSend); this.timestamp,
this.log.trace('constructed chunk string to sign', this.credentialScope,
{ stringToSign }); this.lastSignature,
dataToSend
);
this.log.trace('constructed chunk string to sign', { stringToSign });
// once used prior sig to construct string to sign, reassign // once used prior sig to construct string to sign, reassign
// lastSignature to current signature // lastSignature to current signature
this.lastSignature = this.currentSignature; this.lastSignature = this.currentSignature;
@ -165,17 +193,18 @@ class V4Transform extends Transform {
credentialScope: this.credentialScope, credentialScope: this.credentialScope,
}, },
}; };
return this.vault.authenticateV4Request(vaultParams, null, err => { return this.vault.authenticateV4Request(vaultParams, null, (err) => {
if (err) { if (err) {
this.log.trace('err from vault on streaming v4 auth', this.log.trace('err from vault on streaming v4 auth', {
{ error: err, paramsSentToVault: vaultParams.data }); error: err,
paramsSentToVault: vaultParams.data,
});
return done(err); return done(err);
} }
return done(); return done();
}); });
} }
/** /**
* This function will parse the chunk into metadata and data, * This function will parse the chunk into metadata and data,
* use the metadata to authenticate with vault and send the * use the metadata to authenticate with vault and send the
@ -195,9 +224,10 @@ class V4Transform extends Transform {
if (this.lastPieceDone) { if (this.lastPieceDone) {
const slice = chunk.slice(0, 10); const slice = chunk.slice(0, 10);
this.log.trace('received chunk after end.' + this.log.trace(
'See first 10 bytes of chunk', 'received chunk after end.' + 'See first 10 bytes of chunk',
{ chunk: slice.toString() }); { chunk: slice.toString() }
);
return callback(); return callback();
} }
let unparsedChunk = chunk; let unparsedChunk = chunk;
@ -206,10 +236,11 @@ class V4Transform extends Transform {
// test function // test function
() => chunkLeftToEvaluate, () => chunkLeftToEvaluate,
// async function // async function
done => { (done) => {
if (!this.haveMetadata) { if (!this.haveMetadata) {
this.log.trace('do not have metadata so calling ' + this.log.trace(
'_parseMetadata'); 'do not have metadata so calling ' + '_parseMetadata'
);
// need to parse our metadata // need to parse our metadata
const parsedMetadataResults = const parsedMetadataResults =
this._parseMetadata(unparsedChunk); this._parseMetadata(unparsedChunk);
@ -227,7 +258,7 @@ class V4Transform extends Transform {
} }
if (this.lastChunk) { if (this.lastChunk) {
this.log.trace('authenticating final chunk with no data'); this.log.trace('authenticating final chunk with no data');
return this._authenticate(null, err => { return this._authenticate(null, (err) => {
if (err) { if (err) {
return done(err); return done(err);
} }
@ -246,17 +277,18 @@ class V4Transform extends Transform {
} }
// parse just the next data piece without \r\n at the end // parse just the next data piece without \r\n at the end
// (therefore, minus 2) // (therefore, minus 2)
const nextDataPiece = const nextDataPiece = unparsedChunk.slice(
unparsedChunk.slice(0, this.seekingDataSize - 2); 0,
this.seekingDataSize - 2
);
// add parsed data piece to other currentData pieces // add parsed data piece to other currentData pieces
// so that this.currentData is the full data piece // so that this.currentData is the full data piece
nextDataPiece.copy(this.currentData, this.dataCursor); nextDataPiece.copy(this.currentData, this.dataCursor);
return this._authenticate(this.currentData, err => { return this._authenticate(this.currentData, (err) => {
if (err) { if (err) {
return done(err); return done(err);
} }
unparsedChunk = unparsedChunk = unparsedChunk.slice(this.seekingDataSize);
unparsedChunk.slice(this.seekingDataSize);
this.push(this.currentData); this.push(this.currentData);
this.haveMetadata = false; this.haveMetadata = false;
this.seekingDataSize = -1; this.seekingDataSize = -1;
@ -267,15 +299,13 @@ class V4Transform extends Transform {
}); });
}, },
// final callback // final callback
err => { (err) => {
if (err) { if (err) {
return this.cb(err); return this.cb(err);
} }
// get next chunk // get next chunk
return callback(); return callback();
}, }
); );
} }
} }
module.exports = V4Transform;

View File

@ -1,6 +1,5 @@
const crypto = require('crypto'); import * as crypto from 'crypto';
import * as constants from '../../../constants';
const constants = require('../../../constants');
/** /**
* Constructs stringToSign for chunk * Constructs stringToSign for chunk
@ -13,8 +12,12 @@ const constants = require('../../../constants');
* @param {string} justDataChunk - data portion of chunk * @param {string} justDataChunk - data portion of chunk
* @returns {string} stringToSign * @returns {string} stringToSign
*/ */
function constructChunkStringToSign(timestamp, export default function constructChunkStringToSign(
credentialScope, lastSignature, justDataChunk) { timestamp: string,
credentialScope: string,
lastSignature: string,
justDataChunk: string
): string {
let currentChunkHash; let currentChunkHash;
// for last chunk, there will be no data, so use emptyStringHash // for last chunk, there will be no data, so use emptyStringHash
if (!justDataChunk) { if (!justDataChunk) {
@ -22,11 +25,12 @@ function constructChunkStringToSign(timestamp,
} else { } else {
currentChunkHash = crypto.createHash('sha256'); currentChunkHash = crypto.createHash('sha256');
currentChunkHash = currentChunkHash currentChunkHash = currentChunkHash
.update(justDataChunk, 'binary').digest('hex'); .update(justDataChunk, 'binary')
.digest('hex');
} }
return `AWS4-HMAC-SHA256-PAYLOAD\n${timestamp}\n` + return (
`AWS4-HMAC-SHA256-PAYLOAD\n${timestamp}\n` +
`${credentialScope}\n${lastSignature}\n` + `${credentialScope}\n${lastSignature}\n` +
`${constants.emptyStringHash}\n${currentChunkHash}`; `${constants.emptyStringHash}\n${currentChunkHash}`
);
} }
module.exports = constructChunkStringToSign;

View File

@ -1,12 +1,10 @@
'use strict'; // eslint-disable-line strict
/** /**
* Convert timestamp to milliseconds since Unix Epoch * Convert timestamp to milliseconds since Unix Epoch
* @param {string} timestamp of ISO8601Timestamp format without * @param {string} timestamp of ISO8601Timestamp format without
* dashes or colons, e.g. 20160202T220410Z * dashes or colons, e.g. 20160202T220410Z
* @return {number} number of milliseconds since Unix Epoch * @return {number} number of milliseconds since Unix Epoch
*/ */
function convertAmzTimeToMs(timestamp) { export function convertAmzTimeToMs(timestamp) {
const arr = timestamp.split(''); const arr = timestamp.split('');
// Convert to YYYY-MM-DDTHH:mm:ss.sssZ // Convert to YYYY-MM-DDTHH:mm:ss.sssZ
const ISO8601time = `${arr.slice(0, 4).join('')}-${arr[4]}${arr[5]}` + const ISO8601time = `${arr.slice(0, 4).join('')}-${arr[4]}${arr[5]}` +
@ -21,7 +19,7 @@ function convertAmzTimeToMs(timestamp) {
* @param {string} timestamp of UTC form: Fri, 10 Feb 2012 21:34:55 GMT * @param {string} timestamp of UTC form: Fri, 10 Feb 2012 21:34:55 GMT
* @return {string} ISO8601 timestamp of form: YYYYMMDDTHHMMSSZ * @return {string} ISO8601 timestamp of form: YYYYMMDDTHHMMSSZ
*/ */
function convertUTCtoISO8601(timestamp) { export function convertUTCtoISO8601(timestamp) {
// convert to ISO string: YYYY-MM-DDTHH:mm:ss.sssZ. // convert to ISO string: YYYY-MM-DDTHH:mm:ss.sssZ.
const converted = new Date(timestamp).toISOString(); const converted = new Date(timestamp).toISOString();
// Remove "-"s and "."s and milliseconds // Remove "-"s and "."s and milliseconds
@ -36,7 +34,7 @@ function convertUTCtoISO8601(timestamp) {
* @param {object} log - log for request * @param {object} log - log for request
* @return {boolean} true if there is a time problem * @return {boolean} true if there is a time problem
*/ */
function checkTimeSkew(timestamp, expiry, log) { export function checkTimeSkew(timestamp, expiry, log) {
const currentTime = Date.now(); const currentTime = Date.now();
const fifteenMinutes = (15 * 60 * 1000); const fifteenMinutes = (15 * 60 * 1000);
const parsedTimestamp = convertAmzTimeToMs(timestamp); const parsedTimestamp = convertAmzTimeToMs(timestamp);
@ -56,5 +54,3 @@ function checkTimeSkew(timestamp, expiry, log) {
} }
return false; return false;
} }
module.exports = { convertAmzTimeToMs, convertUTCtoISO8601, checkTimeSkew };

View File

@ -1,6 +1,4 @@
'use strict'; // eslint-disable-line strict import errors from '../../../lib/errors';
const errors = require('../../../lib/errors');
/** /**
* Validate Credentials * Validate Credentials
@ -11,7 +9,7 @@ const errors = require('../../../lib/errors');
* @param {object} log - logging object * @param {object} log - logging object
* @return {boolean} true if credentials are correct format, false if not * @return {boolean} true if credentials are correct format, false if not
*/ */
function validateCredentials(credentials, timestamp, log) { export function validateCredentials(credentials, timestamp, log) {
if (!Array.isArray(credentials) || credentials.length !== 5) { if (!Array.isArray(credentials) || credentials.length !== 5) {
log.warn('credentials in improper format', { credentials }); log.warn('credentials in improper format', { credentials });
return errors.InvalidArgument; return errors.InvalidArgument;
@ -37,20 +35,27 @@ function validateCredentials(credentials, timestamp, log) {
// convert timestamp to format of scopeDate YYYYMMDD // convert timestamp to format of scopeDate YYYYMMDD
const timestampDate = timestamp.split('T')[0]; const timestampDate = timestamp.split('T')[0];
if (scopeDate.length !== 8 || scopeDate !== timestampDate) { if (scopeDate.length !== 8 || scopeDate !== timestampDate) {
log.warn('scope date must be the same date as the timestamp date', log.warn('scope date must be the same date as the timestamp date', {
{ scopeDate, timestampDate }); scopeDate,
timestampDate,
});
return errors.RequestTimeTooSkewed; return errors.RequestTimeTooSkewed;
} }
if (service !== 's3' && service !== 'iam' && service !== 'ring' && if (
service !== 'sts') { service !== 's3' &&
service !== 'iam' &&
service !== 'ring' &&
service !== 'sts'
) {
log.warn('service in credentials is not one of s3/iam/ring/sts', { log.warn('service in credentials is not one of s3/iam/ring/sts', {
service, service,
}); });
return errors.InvalidArgument; return errors.InvalidArgument;
} }
if (requestType !== 'aws4_request') { if (requestType !== 'aws4_request') {
log.warn('requestType contained in params is not aws4_request', log.warn('requestType contained in params is not aws4_request', {
{ requestType }); requestType,
});
return errors.InvalidArgument; return errors.InvalidArgument;
} }
return {}; return {};
@ -62,13 +67,14 @@ function validateCredentials(credentials, timestamp, log) {
* @param {object} log - logging object * @param {object} log - logging object
* @return {object} object containing extracted query params for authV4 * @return {object} object containing extracted query params for authV4
*/ */
function extractQueryParams(queryObj, log) { export function extractQueryParams(queryObj, log) {
const authParams = {}; const authParams = {};
// Do not need the algorithm sent back // Do not need the algorithm sent back
if (queryObj['X-Amz-Algorithm'] !== 'AWS4-HMAC-SHA256') { if (queryObj['X-Amz-Algorithm'] !== 'AWS4-HMAC-SHA256') {
log.warn('algorithm param incorrect', log.warn('algorithm param incorrect', {
{ algo: queryObj['X-Amz-Algorithm'] }); algo: queryObj['X-Amz-Algorithm'],
});
return authParams; return authParams;
} }
@ -81,7 +87,6 @@ function extractQueryParams(queryObj, log) {
return authParams; return authParams;
} }
const signature = queryObj['X-Amz-Signature']; const signature = queryObj['X-Amz-Signature'];
if (signature && signature.length === 64) { if (signature && signature.length === 64) {
authParams.signatureFromRequest = signature; authParams.signatureFromRequest = signature;
@ -94,14 +99,15 @@ function extractQueryParams(queryObj, log) {
if (timestamp && timestamp.length === 16) { if (timestamp && timestamp.length === 16) {
authParams.timestamp = timestamp; authParams.timestamp = timestamp;
} else { } else {
log.warn('missing or invalid timestamp', log.warn('missing or invalid timestamp', {
{ timestamp: queryObj['X-Amz-Date'] }); timestamp: queryObj['X-Amz-Date'],
});
return authParams; return authParams;
} }
const expiry = Number.parseInt(queryObj['X-Amz-Expires'], 10); const expiry = Number.parseInt(queryObj['X-Amz-Expires'], 10);
const sevenDays = 604800; const sevenDays = 604800;
if (expiry && (expiry > 0 && expiry <= sevenDays)) { if (expiry && expiry > 0 && expiry <= sevenDays) {
authParams.expiry = expiry; authParams.expiry = expiry;
} else { } else {
log.warn('invalid expiry', { expiry }); log.warn('invalid expiry', { expiry });
@ -118,17 +124,15 @@ function extractQueryParams(queryObj, log) {
return authParams; return authParams;
} }
/** /**
* Extract and validate components from auth header * Extract and validate components from auth header
* @param {string} authHeader - authorization header from request * @param {string} authHeader - authorization header from request
* @param {object} log - logging object * @param {object} log - logging object
* @return {object} object containing extracted auth header items for authV4 * @return {object} object containing extracted auth header items for authV4
*/ */
function extractAuthItems(authHeader, log) { export function extractAuthItems(authHeader, log) {
const authItems = {}; const authItems = {};
const authArray = authHeader const authArray = authHeader.replace('AWS4-HMAC-SHA256 ', '').split(',');
.replace('AWS4-HMAC-SHA256 ', '').split(',');
if (authArray.length < 3) { if (authArray.length < 3) {
return authItems; return authItems;
@ -138,25 +142,34 @@ function extractAuthItems(authHeader, log) {
const signedHeadersStr = authArray[1]; const signedHeadersStr = authArray[1];
const signatureStr = authArray[2]; const signatureStr = authArray[2];
log.trace('credentials from request', { credentialStr }); log.trace('credentials from request', { credentialStr });
if (credentialStr && credentialStr.trim().startsWith('Credential=') if (
&& credentialStr.indexOf('/') > -1) { credentialStr &&
credentialStr.trim().startsWith('Credential=') &&
credentialStr.indexOf('/') > -1
) {
authItems.credentialsArr = credentialStr authItems.credentialsArr = credentialStr
.trim().replace('Credential=', '').split('/'); .trim()
.replace('Credential=', '')
.split('/');
} else { } else {
log.warn('missing credentials'); log.warn('missing credentials');
} }
log.trace('signed headers from request', { signedHeadersStr }); log.trace('signed headers from request', { signedHeadersStr });
if (signedHeadersStr && signedHeadersStr.trim() if (
.startsWith('SignedHeaders=')) { signedHeadersStr &&
signedHeadersStr.trim().startsWith('SignedHeaders=')
) {
authItems.signedHeaders = signedHeadersStr authItems.signedHeaders = signedHeadersStr
.trim().replace('SignedHeaders=', ''); .trim()
.replace('SignedHeaders=', '');
} else { } else {
log.warn('missing signed headers'); log.warn('missing signed headers');
} }
log.trace('signature from request', { signatureStr }); log.trace('signature from request', { signatureStr });
if (signatureStr && signatureStr.trim().startsWith('Signature=')) { if (signatureStr && signatureStr.trim().startsWith('Signature=')) {
authItems.signatureFromRequest = signatureStr authItems.signatureFromRequest = signatureStr
.trim().replace('Signature=', ''); .trim()
.replace('Signature=', '');
} else { } else {
log.warn('missing signature'); log.warn('missing signature');
} }
@ -170,21 +183,20 @@ function extractAuthItems(authHeader, log) {
* @param {object} allHeaders - request.headers * @param {object} allHeaders - request.headers
* @return {boolean} true if all x-amz-headers included and false if not * @return {boolean} true if all x-amz-headers included and false if not
*/ */
function areSignedHeadersComplete(signedHeaders, allHeaders) { export function areSignedHeadersComplete(signedHeaders, allHeaders) {
const signedHeadersList = signedHeaders.split(';'); const signedHeadersList = signedHeaders.split(';');
if (signedHeadersList.indexOf('host') === -1) { if (signedHeadersList.indexOf('host') === -1) {
return false; return false;
} }
const headers = Object.keys(allHeaders); const headers = Object.keys(allHeaders);
for (let i = 0; i < headers.length; i++) { for (let i = 0; i < headers.length; i++) {
if ((headers[i].startsWith('x-amz-') if (
|| headers[i].startsWith('x-scal-')) (headers[i].startsWith('x-amz-') ||
&& signedHeadersList.indexOf(headers[i]) === -1) { headers[i].startsWith('x-scal-')) &&
signedHeadersList.indexOf(headers[i]) === -1
) {
return false; return false;
} }
} }
return true; return true;
} }
module.exports = { validateCredentials, extractQueryParams,
areSignedHeadersComplete, extractAuthItems };

View File

@ -1,151 +0,0 @@
'use strict'; // eslint-disable-line strict
const crypto = require('crypto');
// The min value here is to manage further backward compat if we
// need it
// Default value
const vaultGeneratedIamSecurityTokenSizeMin = 128;
// Safe to assume that a typical token size is less than 8192 bytes
const vaultGeneratedIamSecurityTokenSizeMax = 8192;
// Base-64
const vaultGeneratedIamSecurityTokenPattern = /^[A-Za-z0-9/+=]*$/;
module.exports = {
// info about the iam security token
iamSecurityToken: {
min: vaultGeneratedIamSecurityTokenSizeMin,
max: vaultGeneratedIamSecurityTokenSizeMax,
pattern: vaultGeneratedIamSecurityTokenPattern,
},
// PublicId is used as the canonicalID for a request that contains
// no authentication information. Requestor can access
// only public resources
publicId: 'http://acs.amazonaws.com/groups/global/AllUsers',
zenkoServiceAccount: 'http://acs.zenko.io/accounts/service',
metadataFileNamespace: '/MDFile',
dataFileURL: '/DataFile',
passthroughFileURL: '/PassthroughFile',
// AWS states max size for user-defined metadata
// (x-amz-meta- headers) is 2 KB:
// http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html
// In testing, AWS seems to allow up to 88 more bytes,
// so we do the same.
maximumMetaHeadersSize: 2136,
emptyFileMd5: 'd41d8cd98f00b204e9800998ecf8427e',
// Version 2 changes the format of the data location property
// Version 3 adds the dataStoreName attribute
// Version 4 add the Creation-Time and Content-Language attributes,
// and add support for x-ms-meta-* headers in UserMetadata
// Version 5 adds the azureInfo structure
mdModelVersion: 5,
/*
* Splitter is used to build the object name for the overview of a
* multipart upload and to build the object names for each part of a
* multipart upload. These objects with large names are then stored in
* metadata in a "shadow bucket" to a real bucket. The shadow bucket
* contains all ongoing multipart uploads. We include in the object
* name some of the info we might need to pull about an open multipart
* upload or about an individual part with each piece of info separated
* by the splitter. We can then extract each piece of info by splitting
* the object name string with this splitter.
* For instance, assuming a splitter of '...!*!',
* the name of the upload overview would be:
* overview...!*!objectKey...!*!uploadId
* For instance, the name of a part would be:
* uploadId...!*!partNumber
*
* The sequence of characters used in the splitter should not occur
* elsewhere in the pieces of info to avoid splitting where not
* intended.
*
* Splitter is also used in adding bucketnames to the
* namespacerusersbucket. The object names added to the
* namespaceusersbucket are of the form:
* canonicalID...!*!bucketname
*/
splitter: '..|..',
usersBucket: 'users..bucket',
// MPU Bucket Prefix is used to create the name of the shadow
// bucket used for multipart uploads. There is one shadow mpu
// bucket per bucket and its name is the mpuBucketPrefix followed
// by the name of the final destination bucket for the object
// once the multipart upload is complete.
mpuBucketPrefix: 'mpuShadowBucket',
// since aws s3 does not allow capitalized buckets, these may be
// used for special internal purposes
permittedCapitalizedBuckets: {
METADATA: true,
},
// Setting a lower object key limit to account for:
// - Mongo key limit of 1012 bytes
// - Version ID in Mongo Key if versioned of 33
// - Max bucket name length if bucket match false of 63
// - Extra prefix slash for bucket prefix if bucket match of 1
objectKeyByteLimit: 915,
/* delimiter for location-constraint. The location constraint will be able
* to include the ingestion flag
*/
zenkoSeparator: ':',
/* eslint-disable camelcase */
externalBackends: { aws_s3: true, azure: true, gcp: true, pfs: true },
replicationBackends: { aws_s3: true, azure: true, gcp: true },
// hex digest of sha256 hash of empty string:
emptyStringHash: crypto.createHash('sha256')
.update('', 'binary').digest('hex'),
mpuMDStoredExternallyBackend: { aws_s3: true, gcp: true },
// AWS sets a minimum size limit for parts except for the last part.
// http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
minimumAllowedPartSize: 5242880,
gcpMaximumAllowedPartCount: 1024,
// GCP Object Tagging Prefix
gcpTaggingPrefix: 'aws-tag-',
productName: 'APN/1.0 Scality/1.0 Scality CloudServer for Zenko',
legacyLocations: ['sproxyd', 'legacy'],
// healthcheck default call from nginx is every 2 seconds
// for external backends, don't call unless at least 1 minute
// (60,000 milliseconds) since last call
externalBackendHealthCheckInterval: 60000,
// some of the available data backends (if called directly rather
// than through the multiple backend gateway) need a key provided
// as a string as first parameter of the get/delete methods.
clientsRequireStringKey: { sproxyd: true, cdmi: true },
hasCopyPartBackends: { aws_s3: true, gcp: true },
versioningNotImplBackends: { azure: true, gcp: true },
// user metadata applied on zenko-created objects
zenkoIDHeader: 'x-amz-meta-zenko-instance-id',
// Default expiration value of the S3 pre-signed URL duration
// 604800 seconds (seven days).
defaultPreSignedURLExpiry: 7 * 24 * 60 * 60,
// Regex for ISO-8601 formatted date
shortIso8601Regex: /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z/,
longIso8601Regex: /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z/,
supportedNotificationEvents: new Set([
's3:ObjectCreated:*',
's3:ObjectCreated:Put',
's3:ObjectCreated:Copy',
's3:ObjectCreated:CompleteMultipartUpload',
's3:ObjectRemoved:*',
's3:ObjectRemoved:Delete',
's3:ObjectRemoved:DeleteMarkerCreated',
]),
notificationArnPrefix: 'arn:scality:bucketnotif',
// HTTP server keep-alive timeout is set to a higher value than
// client's free sockets timeout to avoid the risk of triggering
// ECONNRESET errors if the server closes the connection at the
// exact moment clients attempt to reuse an established connection
// for a new request.
//
// Note: the ability to close inactive connections on the client
// after httpClientFreeSocketsTimeout milliseconds requires the
// use of "agentkeepalive" module instead of the regular node.js
// http.Agent.
httpServerKeepAliveTimeout: 60000,
httpClientFreeSocketTimeout: 55000,
supportedLifecycleRules: [
'expiration',
'noncurrentVersionExpiration',
'abortIncompleteMultipartUpload',
],
};

147
lib/constants.ts Normal file
View File

@ -0,0 +1,147 @@
import * as crypto from 'crypto';
// The min value here is to manage further backward compat if we need it
// Default value
const vaultGeneratedIamSecurityTokenSizeMin = 128;
// Safe to assume that a typical token size is less than 8192 bytes
const vaultGeneratedIamSecurityTokenSizeMax = 8192;
// Base-64
const vaultGeneratedIamSecurityTokenPattern = /^[A-Za-z0-9/+=]*$/;
// info about the iam security token
export const iamSecurityToken = {
min: vaultGeneratedIamSecurityTokenSizeMin,
max: vaultGeneratedIamSecurityTokenSizeMax,
pattern: vaultGeneratedIamSecurityTokenPattern,
};
// PublicId is used as the canonicalID for a request that contains
// no authentication information. Requestor can access
// only public resources
export const publicId = 'http://acs.amazonaws.com/groups/global/AllUsers';
export const zenkoServiceAccount = 'http://acs.zenko.io/accounts/service';
export const metadataFileNamespace = '/MDFile';
export const dataFileURL = '/DataFile';
export const passthroughFileURL = '/PassthroughFile';
// AWS states max size for user-defined metadata
// (x-amz-meta- headers) is 2 KB:
// http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html
// In testing, AWS seems to allow up to 88 more bytes,
// so we do the same.
export const maximumMetaHeadersSize = 2136;
export const emptyFileMd5 = 'd41d8cd98f00b204e9800998ecf8427e';
// Version 2 changes the format of the data location property
// Version 3 adds the dataStoreName attribute
// Version 4 add the Creation-Time and Content-Language attributes,
// and add support for x-ms-meta-* headers in UserMetadata
// Version 5 adds the azureInfo structure
export const mdModelVersion = 5;
/*
* Splitter is used to build the object name for the overview of a
* multipart upload and to build the object names for each part of a
* multipart upload. These objects with large names are then stored in
* metadata in a "shadow bucket" to a real bucket. The shadow bucket
* contains all ongoing multipart uploads. We include in the object
* name some of the info we might need to pull about an open multipart
* upload or about an individual part with each piece of info separated
* by the splitter. We can then extract each piece of info by splitting
* the object name string with this splitter.
* For instance, assuming a splitter of '...!*!',
* the name of the upload overview would be:
* overview...!*!objectKey...!*!uploadId
* For instance, the name of a part would be:
* uploadId...!*!partNumber
*
* The sequence of characters used in the splitter should not occur
* elsewhere in the pieces of info to avoid splitting where not
* intended.
*
* Splitter is also used in adding bucketnames to the
* namespacerusersbucket. The object names added to the
* namespaceusersbucket are of the form:
* canonicalID...!*!bucketname
*/
export const splitter = '..|..';
export const usersBucket = 'users..bucket';
// MPU Bucket Prefix is used to create the name of the shadow
// bucket used for multipart uploads. There is one shadow mpu
// bucket per bucket and its name is the mpuBucketPrefix followed
// by the name of the final destination bucket for the object
// once the multipart upload is complete.
export const mpuBucketPrefix = 'mpuShadowBucket';
// since aws s3 does not allow capitalized buckets, these may be
// used for special internal purposes
export const permittedCapitalizedBuckets = {
METADATA: true,
};
// Setting a lower object key limit to account for:
// - Mongo key limit of 1012 bytes
// - Version ID in Mongo Key if versioned of 33
// - Max bucket name length if bucket match false of 63
// - Extra prefix slash for bucket prefix if bucket match of 1
export const objectKeyByteLimit = 915;
/* delimiter for location-constraint. The location constraint will be able
* to include the ingestion flag
*/
export const zenkoSeparator = ':';
/* eslint-disable camelcase */
export const externalBackends = { aws_s3: true, azure: true, gcp: true, pfs: true };
export const replicationBackends = { aws_s3: true, azure: true, gcp: true };
// hex digest of sha256 hash of empty string:
export const emptyStringHash = crypto.createHash('sha256')
.update('', 'binary').digest('hex');
export const mpuMDStoredExternallyBackend = { aws_s3: true, gcp: true };
// AWS sets a minimum size limit for parts except for the last part.
// http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
export const minimumAllowedPartSize = 5242880;
export const gcpMaximumAllowedPartCount = 1024;
// GCP Object Tagging Prefix
export const gcpTaggingPrefix = 'aws-tag-';
export const productName = 'APN/1.0 Scality/1.0 Scality CloudServer for Zenko';
export const legacyLocations = ['sproxyd', 'legacy'];
// healthcheck default call from nginx is every 2 seconds
// for external backends, don't call unless at least 1 minute
// (60,000 milliseconds) since last call
export const externalBackendHealthCheckInterval = 60000;
// some of the available data backends (if called directly rather
// than through the multiple backend gateway) need a key provided
// as a string as first parameter of the get/delete methods.
export const clientsRequireStringKey = { sproxyd: true, cdmi: true };
export const hasCopyPartBackends = { aws_s3: true, gcp: true };
export const versioningNotImplBackends = { azure: true, gcp: true };
// user metadata applied on zenko-created objects
export const zenkoIDHeader = 'x-amz-meta-zenko-instance-id';
// Default expiration value of the S3 pre-signed URL duration
// 604800 seconds (seven days).
export const defaultPreSignedURLExpiry = 7 * 24 * 60 * 60;
// Regex for ISO-8601 formatted date
export const shortIso8601Regex = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z/;
export const longIso8601Regex = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z/;
export const supportedNotificationEvents = new Set([
's3:ObjectCreated:*',
's3:ObjectCreated:Put',
's3:ObjectCreated:Copy',
's3:ObjectCreated:CompleteMultipartUpload',
's3:ObjectRemoved:*',
's3:ObjectRemoved:Delete',
's3:ObjectRemoved:DeleteMarkerCreated',
]);
export const notificationArnPrefix = 'arn:scality:bucketnotif';
// HTTP server keep-alive timeout is set to a higher value than
// client's free sockets timeout to avoid the risk of triggering
// ECONNRESET errors if the server closes the connection at the
// exact moment clients attempt to reuse an established connection
// for a new request.
//
// Note: the ability to close inactive connections on the client
// after httpClientFreeSocketsTimeout milliseconds requires the
// use of "agentkeepalive" module instead of the regular node.js
// http.Agent.
export const httpServerKeepAliveTimeout = 60000;
export const httpClientFreeSocketTimeout = 55000;
export const supportedLifecycleRules = [
'expiration',
'noncurrentVersionExpiration',
'abortIncompleteMultipartUpload',
];

View File

@ -1,4 +1,4 @@
'use strict'; // eslint-disable-line strict import { LevelDB } from 'level';
const writeOptions = { sync: true }; const writeOptions = { sync: true };
@ -18,7 +18,7 @@ const writeOptions = { sync: true };
* @param {String} message - the Error message. * @param {String} message - the Error message.
* @returns {Error} the Error object. * @returns {Error} the Error object.
*/ */
function propError(propName, message) { function propError(propName: string, message: string): Error {
const err = new Error(message); const err = new Error(message);
err[propName] = true; err[propName] = true;
return err; return err;
@ -27,7 +27,7 @@ function propError(propName, message) {
/** /**
* Running transaction with multiple updates to be committed atomically * Running transaction with multiple updates to be committed atomically
*/ */
class IndexTransaction { export class IndexTransaction {
/** /**
* Builds a new transaction * Builds a new transaction
* *
@ -36,7 +36,7 @@ class IndexTransaction {
* *
* @returns {IndexTransaction} a new empty transaction * @returns {IndexTransaction} a new empty transaction
*/ */
constructor(db) { constructor(db: LevelDB) {
this.operations = []; this.operations = [];
this.db = db; this.db = db;
this.closed = false; this.closed = false;
@ -63,13 +63,17 @@ class IndexTransaction {
*/ */
push(op) { push(op) {
if (this.closed) { if (this.closed) {
throw propError('pushOnCommittedTransaction', throw propError(
'can not add ops to already committed transaction'); 'pushOnCommittedTransaction',
'can not add ops to already committed transaction'
);
} }
if (op.type !== 'put' && op.type !== 'del') { if (op.type !== 'put' && op.type !== 'del') {
throw propError('invalidTransactionVerb', throw propError(
`unknown action type: ${op.type}`); 'invalidTransactionVerb',
`unknown action type: ${op.type}`
);
} }
if (op.key === undefined) { if (op.key === undefined) {
@ -136,14 +140,22 @@ class IndexTransaction {
*/ */
addCondition(condition) { addCondition(condition) {
if (this.closed) { if (this.closed) {
throw propError('pushOnCommittedTransaction', throw propError(
'can not add conditions to already committed transaction'); 'pushOnCommittedTransaction',
'can not add conditions to already committed transaction'
);
} }
if (condition === undefined || Object.keys(condition).length === 0) { if (condition === undefined || Object.keys(condition).length === 0) {
throw propError('missingCondition', 'missing condition for conditional put'); throw propError(
'missingCondition',
'missing condition for conditional put'
);
} }
if (typeof (condition.notExists) !== 'string') { if (typeof condition.notExists !== 'string') {
throw propError('unsupportedConditionalOperation', 'missing key or supported condition'); throw propError(
'unsupportedConditionalOperation',
'missing key or supported condition'
);
} }
this.conditions.push(condition); this.conditions.push(condition);
} }
@ -158,13 +170,21 @@ class IndexTransaction {
*/ */
commit(cb) { commit(cb) {
if (this.closed) { if (this.closed) {
return cb(propError('alreadyCommitted', return cb(
'transaction was already committed')); propError(
'alreadyCommitted',
'transaction was already committed'
)
);
} }
if (this.operations.length === 0) { if (this.operations.length === 0) {
return cb(propError('emptyTransaction', return cb(
'tried to commit an empty transaction')); propError(
'emptyTransaction',
'tried to commit an empty transaction'
)
);
} }
this.closed = true; this.closed = true;
@ -176,7 +196,3 @@ class IndexTransaction {
return this.db.batch(this.operations, writeOptions, cb); return this.db.batch(this.operations, writeOptions, cb);
} }
} }
module.exports = {
IndexTransaction,
};

View File

@ -1,4 +1,4 @@
function reshapeExceptionError(error) { export function reshapeExceptionError(error) {
const { message, code, stack, name } = error; const { message, code, stack, name } = error;
return { return {
message, message,
@ -7,7 +7,3 @@ function reshapeExceptionError(error) {
name, name,
}; };
} }
module.exports = {
reshapeExceptionError,
};

View File

@ -1,11 +1,14 @@
'use strict'; // eslint-disable-line strict import errorsObj from '../errors/arsenalErrors.json';
/** /**
* ArsenalError * ArsenalError
* *
* @extends {Error} * @extends {Error}
*/ */
class ArsenalError extends Error { export class ArsenalError extends Error {
code: number
description: string
/** /**
* constructor. * constructor.
* *
@ -13,7 +16,7 @@ class ArsenalError extends Error {
* @param {number} code - HTTP status code * @param {number} code - HTTP status code
* @param {string} desc - Verbose description of error * @param {string} desc - Verbose description of error
*/ */
constructor(type, code, desc) { constructor(type: string, code: number, desc: string) {
super(type); super(type);
/** /**
@ -65,23 +68,12 @@ class ArsenalError extends Error {
} }
} }
/** const errors: { [key: string]: ArsenalError } = {};
* Generate an Errors instances object. Object.keys(errorsObj)
* .filter((index) => index !== '_comment')
* @returns {Object.<string, ArsenalError>} - object field by arsenalError .forEach((index) => {
* instances const { code, description } = errorsObj[index];
*/ errors[index] = new ArsenalError(index, code, description);
function errorsGen() {
const errors = {};
const errorsObj = require('../errors/arsenalErrors.json');
Object.keys(errorsObj)
.filter(index => index !== '_comment')
.forEach(index => {
errors[index] = new ArsenalError(index, errorsObj[index].code,
errorsObj[index].description);
}); });
return errors;
}
module.exports = errorsGen(); export default errors;

View File

@ -1,6 +1,4 @@
'use strict'; // eslint-disable-line strict export const ciphers = [
const ciphers = [
'DHE-RSA-AES128-GCM-SHA256', 'DHE-RSA-AES128-GCM-SHA256',
'ECDHE-ECDSA-AES128-GCM-SHA256', 'ECDHE-ECDSA-AES128-GCM-SHA256',
'ECDHE-RSA-AES256-GCM-SHA384', 'ECDHE-RSA-AES256-GCM-SHA384',
@ -28,7 +26,3 @@ const ciphers = [
'!EDH-RSA-DES-CBC3-SHA', '!EDH-RSA-DES-CBC3-SHA',
'!KRB5-DES-CBC3-SHA', '!KRB5-DES-CBC3-SHA',
].join(':'); ].join(':');
module.exports = {
ciphers,
};

View File

@ -29,16 +29,11 @@ c2CNfUEqyRbJF4pE9ZcdQReT5p/llmyhQdvq6cHH+cKJk63C6DHRVoStsnsUcvKe
bLxKsygK77ttjr61cxLoDJeGd5L5h1CPmwIBAg== bLxKsygK77ttjr61cxLoDJeGd5L5h1CPmwIBAg==
-----END DH PARAMETERS----- -----END DH PARAMETERS-----
*/ */
'use strict'; // eslint-disable-line strict
const dhparam = export const dhparam =
'MIIBCAKCAQEAh99T77KGNuiY9N6xrCJ3QNv4SFADTa3CD+1VMTAdRJLHUNpglB+i' + 'MIIBCAKCAQEAh99T77KGNuiY9N6xrCJ3QNv4SFADTa3CD+1VMTAdRJLHUNpglB+i' +
'AoTYiLDFZgtTCpx0ZZUD+JM3qiCZy0OK5/ZGlVD7sZmxjRtdpVK4qIPtwav8t0J7' + 'AoTYiLDFZgtTCpx0ZZUD+JM3qiCZy0OK5/ZGlVD7sZmxjRtdpVK4qIPtwav8t0J7' +
'c2CNfUEqyRbJF4pE9ZcdQReT5p/llmyhQdvq6cHH+cKJk63C6DHRVoStsnsUcvKe' + 'c2CNfUEqyRbJF4pE9ZcdQReT5p/llmyhQdvq6cHH+cKJk63C6DHRVoStsnsUcvKe' +
'23PLGZulKg8H3eRBxHamHkmyuEVDtoNhMIoJONsdXSpo5GgcD4EQMM8xb/qsnCxn' + '23PLGZulKg8H3eRBxHamHkmyuEVDtoNhMIoJONsdXSpo5GgcD4EQMM8xb/qsnCxn' +
'6QIGTBvcHskxtlTZOfUPk4XQ6Yb3tQi2TurzkQHLln4U7p/GZs+D+6D3SgSPqr6P' + '6QIGTBvcHskxtlTZOfUPk4XQ6Yb3tQi2TurzkQHLln4U7p/GZs+D+6D3SgSPqr6P' +
'bLxKsygK77ttjr61cxLoDJeGd5L5h1CPmwIBAg=='; 'bLxKsygK77ttjr61cxLoDJeGd5L5h1CPmwIBAg==';
module.exports = {
dhparam,
};

2
lib/https/index.ts Normal file
View File

@ -0,0 +1,2 @@
export * as ciphers from './ciphers'
export * as dhparam from './dh2048'

View File

@ -1,6 +1,4 @@
'use strict'; // eslint-disable-line strict import ipaddr from 'ipaddr.js';
const ipaddr = require('ipaddr.js');
/** /**
* checkIPinRangeOrMatch checks whether a given ip address is in an ip address * checkIPinRangeOrMatch checks whether a given ip address is in an ip address
@ -9,7 +7,7 @@ const ipaddr = require('ipaddr.js');
* @param {object} ip - parsed ip address * @param {object} ip - parsed ip address
* @return {boolean} true if in range, false if not * @return {boolean} true if in range, false if not
*/ */
function checkIPinRangeOrMatch(cidr, ip) { export function checkIPinRangeOrMatch(cidr, ip) {
// If there is an exact match of the ip address, no need to check ranges // If there is an exact match of the ip address, no need to check ranges
if (ip.toString() === cidr) { if (ip.toString() === cidr) {
return true; return true;
@ -39,7 +37,7 @@ function checkIPinRangeOrMatch(cidr, ip) {
* @param {string} ip - IPV4/IPV6/IPV4-mapped IPV6 address * @param {string} ip - IPV4/IPV6/IPV4-mapped IPV6 address
* @return {object} parsedIp - Object representation of parsed IP * @return {object} parsedIp - Object representation of parsed IP
*/ */
function parseIp(ip) { export function parseIp(ip) {
if (ipaddr.IPv4.isValid(ip)) { if (ipaddr.IPv4.isValid(ip)) {
return ipaddr.parse(ip); return ipaddr.parse(ip);
} }
@ -60,7 +58,7 @@ function parseIp(ip) {
* @param {string} ip - IP address * @param {string} ip - IP address
* @return {boolean} - true if there is match or false for no match * @return {boolean} - true if there is match or false for no match
*/ */
function ipMatchCidrList(cidrList, ip) { export function ipMatchCidrList(cidrList, ip) {
const parsedIp = parseIp(ip); const parsedIp = parseIp(ip);
return cidrList.some(item => { return cidrList.some(item => {
let cidr; let cidr;
@ -75,9 +73,3 @@ function ipMatchCidrList(cidrList, ip) {
return checkIPinRangeOrMatch(cidr || item, parsedIp); return checkIPinRangeOrMatch(cidr || item, parsedIp);
}); });
} }
module.exports = {
checkIPinRangeOrMatch,
ipMatchCidrList,
parseIp,
};

View File

@ -1,6 +1,5 @@
'use strict'; // eslint-disable-line import { debuglog } from 'util';
const debug = debuglog('jsutil');
const debug = require('util').debuglog('jsutil');
// JavaScript utility functions // JavaScript utility functions
@ -17,7 +16,7 @@ const debug = require('util').debuglog('jsutil');
* @return {function} a callable wrapper mirroring <tt>func</tt> but * @return {function} a callable wrapper mirroring <tt>func</tt> but
* only calls <tt>func</tt> at first invocation. * only calls <tt>func</tt> at first invocation.
*/ */
module.exports.once = function once(func) { export function once(func) {
const state = { called: false, res: undefined }; const state = { called: false, res: undefined };
return function wrapper(...args) { return function wrapper(...args) {
if (!state.called) { if (!state.called) {

View File

@ -1,6 +1,8 @@
const Redis = require('ioredis'); import Redis from 'ioredis';
export default class RedisClient {
_client: Redis
class RedisClient {
/** /**
* @constructor * @constructor
* @param {Object} config - config * @param {Object} config - config
@ -11,13 +13,13 @@ class RedisClient {
*/ */
constructor(config, logger) { constructor(config, logger) {
this._client = new Redis(config); this._client = new Redis(config);
this._client.on('error', err => this._client.on('error', (err) =>
logger.trace('error from redis', { logger.trace('error from redis', {
error: err, error: err,
method: 'RedisClient.constructor', method: 'RedisClient.constructor',
redisHost: config.host, redisHost: config.host,
redisPort: config.port, redisPort: config.port,
}), })
); );
return this; return this;
} }
@ -29,12 +31,12 @@ class RedisClient {
* @param {callback} cb - callback (error, result) * @param {callback} cb - callback (error, result)
* @return {undefined} * @return {undefined}
*/ */
scan(pattern, count = 10, cb) { scan(pattern: string, count = 10, cb) {
const params = { match: pattern, count }; const params = { match: pattern, count };
const keys = []; const keys = [];
const stream = this._client.scanStream(params); const stream = this._client.scanStream(params);
stream.on('data', resultKeys => { stream.on('data', (resultKeys) => {
for (let i = 0; i < resultKeys.length; i++) { for (let i = 0; i < resultKeys.length; i++) {
keys.push(resultKeys[i]); keys.push(resultKeys[i]);
} }
@ -51,9 +53,12 @@ class RedisClient {
* @param {callback} cb - callback * @param {callback} cb - callback
* @return {undefined} * @return {undefined}
*/ */
incrEx(key, expiry, cb) { incrEx(key: string, expiry: number, cb) {
return this._client return this._client
.multi([['incr', key], ['expire', key, expiry]]) .multi([
['incr', key],
['expire', key, expiry],
])
.exec(cb); .exec(cb);
} }
@ -64,7 +69,7 @@ class RedisClient {
* @param {callback} cb - callback * @param {callback} cb - callback
* @return {undefined} * @return {undefined}
*/ */
incrby(key, amount, cb) { incrby(key: string, amount: number, cb) {
return this._client.incrby(key, amount, cb); return this._client.incrby(key, amount, cb);
} }
@ -76,9 +81,12 @@ class RedisClient {
* @param {callback} cb - callback * @param {callback} cb - callback
* @return {undefined} * @return {undefined}
*/ */
incrbyEx(key, amount, expiry, cb) { incrbyEx(key: string, amount: number, expiry: number, cb) {
return this._client return this._client
.multi([['incrby', key, amount], ['expire', key, expiry]]) .multi([
['incrby', key, amount],
['expire', key, expiry],
])
.exec(cb); .exec(cb);
} }
@ -89,7 +97,7 @@ class RedisClient {
* @param {callback} cb - callback * @param {callback} cb - callback
* @return {undefined} * @return {undefined}
*/ */
decrby(key, amount, cb) { decrby(key: string, amount: number, cb) {
return this._client.decrby(key, amount, cb); return this._client.decrby(key, amount, cb);
} }
@ -99,7 +107,7 @@ class RedisClient {
* @param {callback} cb - callback * @param {callback} cb - callback
* @return {undefined} * @return {undefined}
*/ */
get(key, cb) { get(key: string, cb) {
return this._client.get(key, cb); return this._client.get(key, cb);
} }
@ -111,7 +119,7 @@ class RedisClient {
* If cb response returns 1, key exists. * If cb response returns 1, key exists.
* @return {undefined} * @return {undefined}
*/ */
exists(key, cb) { exists(key: string, cb) {
return this._client.exists(key, cb); return this._client.exists(key, cb);
} }
@ -121,7 +129,7 @@ class RedisClient {
* @param {callback} cb - callback * @param {callback} cb - callback
* @return {undefined} * @return {undefined}
*/ */
batch(cmds, cb) { batch(cmds: string[], cb) {
return this._client.pipeline(cmds).exec(cb); return this._client.pipeline(cmds).exec(cb);
} }
@ -134,7 +142,7 @@ class RedisClient {
* @param {callback} cb - callback * @param {callback} cb - callback
* @return {undefined} * @return {undefined}
*/ */
zadd(key, score, value, cb) { zadd(key: string, score: number, value: string, cb) {
return this._client.zadd(key, score, value, cb); return this._client.zadd(key, score, value, cb);
} }
@ -147,7 +155,7 @@ class RedisClient {
* @param {function} cb - callback * @param {function} cb - callback
* @return {undefined} * @return {undefined}
*/ */
zcard(key, cb) { zcard(key: string, cb) {
return this._client.zcard(key, cb); return this._client.zcard(key, cb);
} }
@ -161,7 +169,7 @@ class RedisClient {
* @param {function} cb - callback * @param {function} cb - callback
* @return {undefined} * @return {undefined}
*/ */
zscore(key, value, cb) { zscore(key: string, value: string, cb) {
return this._client.zscore(key, value, cb); return this._client.zscore(key, value, cb);
} }
@ -174,7 +182,7 @@ class RedisClient {
* The cb response returns number of values removed * The cb response returns number of values removed
* @return {undefined} * @return {undefined}
*/ */
zrem(key, value, cb) { zrem(key: string, value: string | any[], cb) {
return this._client.zrem(key, value, cb); return this._client.zrem(key, value, cb);
} }
@ -186,7 +194,7 @@ class RedisClient {
* @param {function} cb - callback * @param {function} cb - callback
* @return {undefined} * @return {undefined}
*/ */
zrange(key, start, end, cb) { zrange(key: string, start: number, end: number, cb) {
return this._client.zrange(key, start, end, cb); return this._client.zrange(key, start, end, cb);
} }
@ -200,7 +208,7 @@ class RedisClient {
* @param {function} cb - callback * @param {function} cb - callback
* @return {undefined} * @return {undefined}
*/ */
zrangebyscore(key, min, max, cb) { zrangebyscore(key: string, min: number | string, max: number | string, cb) {
return this._client.zrangebyscore(key, min, max, cb); return this._client.zrangebyscore(key, min, max, cb);
} }
@ -210,7 +218,7 @@ class RedisClient {
* @param {function} cb - callback * @param {function} cb - callback
* @return {undefined} * @return {undefined}
*/ */
ttl(key, cb) { ttl(key: string, cb) {
return this._client.ttl(key, cb); return this._client.ttl(key, cb);
} }
@ -226,5 +234,3 @@ class RedisClient {
return this._client.client('list', cb); return this._client.client('list', cb);
} }
} }
module.exports = RedisClient;

View File

@ -1,13 +1,18 @@
const async = require('async'); import async from 'async';
import RedisClient from './RedisClient';
export default class StatsClient {
_redis?: RedisClient;
_interval: number;
_expiry: number;
class StatsClient {
/** /**
* @constructor * @constructor
* @param {object} redisClient - RedisClient instance * @param {object} redisClient - RedisClient instance
* @param {number} interval - sampling interval by seconds * @param {number} interval - sampling interval by seconds
* @param {number} expiry - sampling duration by seconds * @param {number} expiry - sampling duration by seconds
*/ */
constructor(redisClient, interval, expiry) { constructor(redisClient: RedisClient, interval: number, expiry: number) {
this._redis = redisClient; this._redis = redisClient;
this._interval = interval; this._interval = interval;
this._expiry = expiry; this._expiry = expiry;
@ -24,9 +29,9 @@ class StatsClient {
* @param {object} d - Date instance * @param {object} d - Date instance
* @return {number} timestamp - normalized to the nearest interval * @return {number} timestamp - normalized to the nearest interval
*/ */
_normalizeTimestamp(d) { _normalizeTimestamp(d: Date): number {
const s = d.getSeconds(); const s = d.getSeconds();
return d.setSeconds(s - s % this._interval, 0); return d.setSeconds(s - (s % this._interval), 0);
} }
/** /**
@ -34,7 +39,7 @@ class StatsClient {
* @param {object} d - Date instance * @param {object} d - Date instance
* @return {number} timestamp - set to the previous interval * @return {number} timestamp - set to the previous interval
*/ */
_setPrevInterval(d) { _setPrevInterval(d: Date): number {
return d.setSeconds(d.getSeconds() - this._interval); return d.setSeconds(d.getSeconds() - this._interval);
} }
@ -44,7 +49,7 @@ class StatsClient {
* @param {Date} date - Date instance * @param {Date} date - Date instance
* @return {string} key - key for redis * @return {string} key - key for redis
*/ */
buildKey(name, date) { buildKey(name: string, date: Date): string {
return `${name}:${this._normalizeTimestamp(date)}`; return `${name}:${this._normalizeTimestamp(date)}`;
} }
@ -54,7 +59,7 @@ class StatsClient {
* @param {array} arr - Date instance * @param {array} arr - Date instance
* @return {string} key - key for redis * @return {string} key - key for redis
*/ */
_getCount(arr) { _getCount(arr: any[]): string {
return arr.reduce((prev, a) => { return arr.reduce((prev, a) => {
let num = parseInt(a[1], 10); let num = parseInt(a[1], 10);
num = Number.isNaN(num) ? 0 : num; num = Number.isNaN(num) ? 0 : num;
@ -69,7 +74,7 @@ class StatsClient {
* @param {function} cb - callback * @param {function} cb - callback
* @return {undefined} * @return {undefined}
*/ */
reportNewRequest(id, incr, cb) { reportNewRequest(id: string, incr: number, cb) {
if (!this._redis) { if (!this._redis) {
return undefined; return undefined;
} }
@ -81,8 +86,8 @@ class StatsClient {
callback = incr; callback = incr;
amount = 1; amount = 1;
} else { } else {
callback = (cb && typeof cb === 'function') ? cb : this._noop; callback = cb && typeof cb === 'function' ? cb : this._noop;
amount = (typeof incr === 'number') ? incr : 1; amount = typeof incr === 'number' ? incr : 1;
} }
const key = this.buildKey(`${id}:requests`, new Date()); const key = this.buildKey(`${id}:requests`, new Date());
@ -97,7 +102,7 @@ class StatsClient {
* @param {function} [cb] - callback * @param {function} [cb] - callback
* @return {undefined} * @return {undefined}
*/ */
incrementKey(key, incr, cb) { incrementKey(key: string, incr: number, cb) {
const callback = cb || this._noop; const callback = cb || this._noop;
return this._redis.incrby(key, incr, callback); return this._redis.incrby(key, incr, callback);
} }
@ -109,7 +114,7 @@ class StatsClient {
* @param {function} [cb] - callback * @param {function} [cb] - callback
* @return {undefined} * @return {undefined}
*/ */
decrementKey(key, decr, cb) { decrementKey(key: string, decr: number, cb) {
const callback = cb || this._noop; const callback = cb || this._noop;
return this._redis.decrby(key, decr, callback); return this._redis.decrby(key, decr, callback);
} }
@ -120,7 +125,7 @@ class StatsClient {
* @param {callback} cb - callback * @param {callback} cb - callback
* @return {undefined} * @return {undefined}
*/ */
report500(id, cb) { report500(id: string, cb) {
if (!this._redis) { if (!this._redis) {
return undefined; return undefined;
} }
@ -136,21 +141,24 @@ class StatsClient {
* @param {callback} cb - callback to call with the err/result * @param {callback} cb - callback to call with the err/result
* @return {undefined} * @return {undefined}
*/ */
getAllStats(log, ids, cb) { getAllStats(log, ids: any[], cb) {
if (!this._redis) { if (!this._redis) {
return cb(null, {}); return cb(null, {});
} }
const statsRes = { const statsRes = {
'requests': 0, requests: 0,
'500s': 0, '500s': 0,
'sampleDuration': this._expiry, sampleDuration: this._expiry,
}; };
let requests = 0; let requests = 0;
let errors = 0; let errors = 0;
// for now set concurrency to default of 10 // for now set concurrency to default of 10
return async.eachLimit(ids, 10, (id, done) => { return async.eachLimit(
ids,
10,
(id, done) => {
this.getStats(log, id, (err, res) => { this.getStats(log, id, (err, res) => {
if (err) { if (err) {
return done(err); return done(err);
@ -159,7 +167,8 @@ class StatsClient {
errors += res['500s']; errors += res['500s'];
return done(); return done();
}); });
}, error => { },
(error) => {
if (error) { if (error) {
log.error('error getting stats', { log.error('error getting stats', {
error, error,
@ -170,7 +179,8 @@ class StatsClient {
statsRes.requests = requests; statsRes.requests = requests;
statsRes['500s'] = errors; statsRes['500s'] = errors;
return cb(null, statsRes); return cb(null, statsRes);
}); }
);
} }
/** /**
@ -180,7 +190,7 @@ class StatsClient {
* @param {callback} cb - callback to call with the err/result * @param {callback} cb - callback to call with the err/result
* @return {undefined} * @return {undefined}
*/ */
getStats(log, id, cb) { getStats(log, id: string, cb) {
if (!this._redis) { if (!this._redis) {
return cb(null, {}); return cb(null, {});
} }
@ -205,9 +215,9 @@ class StatsClient {
* index 1 contains the result * index 1 contains the result
*/ */
const statsRes = { const statsRes = {
'requests': 0, requests: 0,
'500s': 0, '500s': 0,
'sampleDuration': this._expiry, sampleDuration: this._expiry,
}; };
if (err) { if (err) {
log.error('error getting stats', { log.error('error getting stats', {
@ -227,5 +237,3 @@ class StatsClient {
}); });
} }
} }
module.exports = StatsClient;

View File

@ -1,6 +1,5 @@
const async = require('async'); import async from 'async';
import StatsClient from './StatsClient';
const StatsClient = require('./StatsClient');
/** /**
* @class StatsModel * @class StatsModel
@ -8,16 +7,16 @@ const StatsClient = require('./StatsClient');
* @classdesc Extend and overwrite how timestamps are normalized by minutes * @classdesc Extend and overwrite how timestamps are normalized by minutes
* rather than by seconds * rather than by seconds
*/ */
class StatsModel extends StatsClient { export default class StatsModel extends StatsClient {
/** /**
* Utility method to convert 2d array rows to columns, and vice versa * Utility method to convert 2d array rows to columns, and vice versa
* See also: https://docs.ruby-lang.org/en/2.0.0/Array.html#method-i-zip * See also: https://docs.ruby-lang.org/en/2.0.0/Array.html#method-i-zip
* @param {array} arrays - 2d array of integers * @param {array} arrays - 2d array of integers
* @return {array} converted array * @return {array} converted array
*/ */
_zip(arrays) { _zip(arrays: number[][]) {
if (arrays.length > 0 && arrays.every(a => Array.isArray(a))) { if (arrays.length > 0 && arrays.every((a) => Array.isArray(a))) {
return arrays[0].map((_, i) => arrays.map(a => a[i])); return arrays[0].map((_, i) => arrays.map((a) => a[i]));
} }
return []; return [];
} }
@ -27,9 +26,9 @@ class StatsModel extends StatsClient {
* @param {object} d - Date instance * @param {object} d - Date instance
* @return {number} timestamp - normalized to the nearest interval * @return {number} timestamp - normalized to the nearest interval
*/ */
_normalizeTimestamp(d) { _normalizeTimestamp(d: Date) {
const m = d.getMinutes(); const m = d.getMinutes();
return d.setMinutes(m - m % (Math.floor(this._interval / 60)), 0, 0); return d.setMinutes(m - (m % Math.floor(this._interval / 60)), 0, 0);
} }
/** /**
@ -64,16 +63,16 @@ class StatsModel extends StatsClient {
* @param {callback} cb - callback to call with the err/result * @param {callback} cb - callback to call with the err/result
* @return {undefined} * @return {undefined}
*/ */
getAllStats(log, ids, cb) { getAllStats(log, ids: string[], cb) {
if (!this._redis) { if (!this._redis) {
return cb(null, {}); return cb(null, {});
} }
const size = Math.floor(this._expiry / this._interval); const size = Math.floor(this._expiry / this._interval);
const statsRes = { const statsRes = {
'requests': Array(size).fill(0), requests: Array(size).fill(0),
'500s': Array(size).fill(0), '500s': Array(size).fill(0),
'sampleDuration': this._expiry, sampleDuration: this._expiry,
}; };
const requests = []; const requests = [];
const errors = []; const errors = [];
@ -117,9 +116,9 @@ class StatsModel extends StatsClient {
* @param {function} cb - Callback * @param {function} cb - Callback
* @return {undefined} * @return {undefined}
*/ */
getAllGlobalStats(ids, log, cb) { getAllGlobalStats(ids: string[], log, cb) {
const reqsKeys = ids.map(key => (['get', key])); const reqsKeys = ids.map((key) => ['get', key]);
return this._redis.batch(reqsKeys, (err, res) => { return this._redis!.batch(reqsKeys, (err, res) => {
const statsRes = { requests: 0 }; const statsRes = { requests: 0 };
if (err) { if (err) {
log.error('error getting metrics', { log.error('error getting metrics', {
@ -148,7 +147,7 @@ class StatsModel extends StatsClient {
* @param {Date} d - Date instance * @param {Date} d - Date instance
* @return {number} timestamp - normalized to the nearest hour * @return {number} timestamp - normalized to the nearest hour
*/ */
normalizeTimestampByHour(d) { normalizeTimestampByHour(d: Date) {
return d.setMinutes(0, 0, 0); return d.setMinutes(0, 0, 0);
} }
@ -157,7 +156,7 @@ class StatsModel extends StatsClient {
* @param {Date} d - Date instance * @param {Date} d - Date instance
* @return {number} timestamp - one hour prior to date passed * @return {number} timestamp - one hour prior to date passed
*/ */
_getDatePreviousHour(d) { _getDatePreviousHour(d: Date) {
return d.setHours(d.getHours() - 1); return d.setHours(d.getHours() - 1);
} }
@ -166,8 +165,8 @@ class StatsModel extends StatsClient {
* @param {number} epoch - epoch time * @param {number} epoch - epoch time
* @return {array} array of sorted set key timestamps * @return {array} array of sorted set key timestamps
*/ */
getSortedSetHours(epoch) { getSortedSetHours(epoch: number) {
const timestamps = []; const timestamps: number[] = [];
let date = this.normalizeTimestampByHour(new Date(epoch)); let date = this.normalizeTimestampByHour(new Date(epoch));
while (timestamps.length < 24) { while (timestamps.length < 24) {
timestamps.push(date); timestamps.push(date);
@ -181,7 +180,7 @@ class StatsModel extends StatsClient {
* @param {number} epoch - epoch time * @param {number} epoch - epoch time
* @return {string} normalized hour timestamp for given time * @return {string} normalized hour timestamp for given time
*/ */
getSortedSetCurrentHour(epoch) { getSortedSetCurrentHour(epoch: number) {
return this.normalizeTimestampByHour(new Date(epoch)); return this.normalizeTimestampByHour(new Date(epoch));
} }
@ -193,8 +192,8 @@ class StatsModel extends StatsClient {
* @param {callback} cb - callback * @param {callback} cb - callback
* @return {undefined} * @return {undefined}
*/ */
addToSortedSet(key, score, value, cb) { addToSortedSet(key: string, score: number, value: string, cb) {
this._redis.exists(key, (err, resCode) => { this._redis!.exists(key, (err, resCode) => {
if (err) { if (err) {
return cb(err); return cb(err);
} }
@ -203,8 +202,7 @@ class StatsModel extends StatsClient {
const msInADay = 24 * 60 * 60 * 1000; const msInADay = 24 * 60 * 60 * 1000;
const nearestHour = this.normalizeTimestampByHour(new Date()); const nearestHour = this.normalizeTimestampByHour(new Date());
// in seconds // in seconds
const ttl = Math.ceil( const ttl = Math.ceil((msInADay - (Date.now() - nearestHour)) / 1000);
(msInADay - (Date.now() - nearestHour)) / 1000);
const cmds = [ const cmds = [
['zadd', key, score, value], ['zadd', key, score, value],
['expire', key, ttl], ['expire', key, ttl],
@ -213,7 +211,7 @@ class StatsModel extends StatsClient {
if (err) { if (err) {
return cb(err); return cb(err);
} }
const cmdErr = res.find(r => r[0] !== null); const cmdErr = res.find((r) => r[0] !== null);
if (cmdErr) { if (cmdErr) {
return cb(cmdErr); return cb(cmdErr);
} }
@ -221,9 +219,7 @@ class StatsModel extends StatsClient {
return cb(null, successResponse); return cb(null, successResponse);
}); });
} }
return this._redis.zadd(key, score, value, cb); return this._redis!.zadd(key, score, value, cb);
}); });
} }
} }
module.exports = StatsModel;

View File

@ -1,13 +1,13 @@
const promClient = require('prom-client'); import promClient from 'prom-client';
const collectDefaultMetricsIntervalMs = const collectDefaultMetricsIntervalMs =
process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS !== undefined ? process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS !== undefined
Number.parseInt(process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS, 10) : ? Number.parseInt(process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS, 10)
10000; : 10000;
promClient.collectDefaultMetrics({ timeout: collectDefaultMetricsIntervalMs }); promClient.collectDefaultMetrics({ timeout: collectDefaultMetricsIntervalMs });
class ZenkoMetrics { export default class ZenkoMetrics {
static createCounter(params) { static createCounter(params) {
return new promClient.Counter(params); return new promClient.Counter(params);
} }
@ -36,5 +36,3 @@ class ZenkoMetrics {
return promClient.register.contentType; return promClient.register.contentType;
} }
} }
module.exports = ZenkoMetrics;

4
lib/metrics/index.ts Normal file
View File

@ -0,0 +1,4 @@
export { default as StatsClient } from './StatsClient';
export { default as StatsModel } from './StatsModel';
export { default as RedisClient } from './RedisClient';
export { default as ZenkoMetrics } from './ZenkoMetrics';

View File

@ -1,11 +1,17 @@
const errors = require('../errors'); import errors from '../errors';
const validServices = { const validServices = {
aws: ['s3', 'iam', 'sts', 'ring'], aws: ['s3', 'iam', 'sts', 'ring'],
scality: ['utapi', 'sso'], scality: ['utapi', 'sso'],
}; };
class ARN { export default class ARN {
_partition: string;
_service: string;
_region: string | null;
_accountId: string | null;
_resource: string;
/** /**
* *
* Create an ARN object from its individual components * Create an ARN object from its individual components
@ -17,7 +23,7 @@ class ARN {
* @param {string} [accountId] - AWS 12-digit account ID * @param {string} [accountId] - AWS 12-digit account ID
* @param {string} resource - AWS resource path (e.g. 'foo/bar') * @param {string} resource - AWS resource path (e.g. 'foo/bar')
*/ */
constructor(partition, service, region, accountId, resource) { constructor(partition: string, service: string, region: string, accountId: string, resource: string) {
this._partition = partition; this._partition = partition;
this._service = service; this._service = service;
this._region = region || null; this._region = region || null;
@ -25,7 +31,7 @@ class ARN {
this._resource = resource; this._resource = resource;
} }
static createFromString(arnStr) { static createFromString(arnStr: string) {
const [arn, partition, service, region, accountId, const [arn, partition, service, region, accountId,
resourceType, resource] = arnStr.split(':'); resourceType, resource] = arnStr.split(':');
@ -57,8 +63,8 @@ class ARN {
`bad ARN: bad account ID "${accountId}": ` + `bad ARN: bad account ID "${accountId}": ` +
'must be a 12-digit number or "*"') }; 'must be a 12-digit number or "*"') };
} }
const fullResource = (resource !== undefined ? const fullResource = resource !== undefined ?
`${resourceType}:${resource}` : resourceType); `${resourceType}:${resource}` : resourceType;
return new ARN(partition, service, region, accountId, fullResource); return new ARN(partition, service, region, accountId, fullResource);
} }
@ -79,21 +85,21 @@ class ARN {
} }
isIAMAccount() { isIAMAccount() {
return this.getService() === 'iam' return this.getService() === 'iam' &&
&& this.getAccountId() !== null this.getAccountId() !== null &&
&& this.getAccountId() !== '*' this.getAccountId() !== '*' &&
&& this.getResource() === 'root'; this.getResource() === 'root';
} }
isIAMUser() { isIAMUser() {
return this.getService() === 'iam' return this.getService() === 'iam' &&
&& this.getAccountId() !== null this.getAccountId() !== null &&
&& this.getAccountId() !== '*' this.getAccountId() !== '*' &&
&& this.getResource().startsWith('user/'); this.getResource().startsWith('user/');
} }
isIAMRole() { isIAMRole() {
return this.getService() === 'iam' return this.getService() === 'iam' &&
&& this.getAccountId() !== null this.getAccountId() !== null &&
&& this.getResource().startsWith('role'); this.getResource().startsWith('role');
} }
toString() { toString() {
@ -102,5 +108,3 @@ class ARN {
.join(':'); .join(':');
} }
} }
module.exports = ARN;

View File

@ -1,7 +1,13 @@
const { legacyLocations } = require('../constants'); import { legacyLocations } from '../constants';
const escapeForXml = require('../s3middleware/escapeForXml'); import escapeForXml from '../s3middleware/escapeForXml';
export default class BackendInfo {
_config;
_objectLocationConstraint;
_bucketLocationConstraint;
_requestEndpoint;
_legacyLocationConstraint;
class BackendInfo {
/** /**
* Represents the info necessary to evaluate which data backend to use * Represents the info necessary to evaluate which data backend to use
* on a data put call. * on a data put call.
@ -236,5 +242,3 @@ class BackendInfo {
return this._config.backends.data; return this._config.backends.data;
} }
} }
module.exports = BackendInfo;

View File

@ -2,7 +2,9 @@
* Helper class to ease access to the Azure specific information for * Helper class to ease access to the Azure specific information for
* storage accounts mapped to buckets. * storage accounts mapped to buckets.
*/ */
class BucketAzureInfo { export default class BucketAzureInfo {
_data
/** /**
* @constructor * @constructor
* @param {object} obj - Raw structure for the Azure info on storage account * @param {object} obj - Raw structure for the Azure info on storage account
@ -233,5 +235,3 @@ class BucketAzureInfo {
return this._data; return this._data;
} }
} }
module.exports = BucketAzureInfo;

View File

@ -1,19 +1,44 @@
const assert = require('assert'); import assert from 'assert';
const uuid = require('uuid/v4'); import uuid from 'uuid/v4';
const { WebsiteConfiguration } = require('./WebsiteConfiguration'); import { WebsiteConfiguration } from './WebsiteConfiguration';
const ReplicationConfiguration = require('./ReplicationConfiguration'); import ReplicationConfiguration from './ReplicationConfiguration';
const LifecycleConfiguration = require('./LifecycleConfiguration'); import LifecycleConfiguration from './LifecycleConfiguration';
const ObjectLockConfiguration = require('./ObjectLockConfiguration'); import ObjectLockConfiguration from './ObjectLockConfiguration';
const BucketPolicy = require('./BucketPolicy'); import BucketPolicy from './BucketPolicy';
const NotificationConfiguration = require('./NotificationConfiguration'); import NotificationConfiguration from './NotificationConfiguration';
// WHEN UPDATING THIS NUMBER, UPDATE BucketInfoModelVersion.md CHANGELOG // WHEN UPDATING THIS NUMBER, UPDATE BucketInfoModelVersion.md CHANGELOG
// BucketInfoModelVersion.md can be found in documentation/ at the root // BucketInfoModelVersion.md can be found in documentation/ at the root
// of this repository // of this repository
const modelVersion = 14; const modelVersion = 14;
class BucketInfo { export default class BucketInfo {
_acl;
_name;
_owner;
_ownerDisplayName;
_creationDate;
_mdBucketModelVersion;
_transient;
_deleted;
_serverSideEncryption;
_versioningConfiguration;
_locationConstraint;
_readLocationConstraint;
_websiteConfiguration;
_replicationConfiguration;
_cors;
_lifecycleConfiguration;
_bucketPolicy;
_uid;
_isNFS;
_ingestion;
_azureInfo;
_objectLockEnabled;
_objectLockConfiguration;
_notificationConfiguration;
/** /**
* Represents all bucket information. * Represents all bucket information.
* @constructor * @constructor
@ -768,5 +793,3 @@ class BucketInfo {
return this; return this;
} }
} }
module.exports = BucketInfo;

View File

@ -1,7 +1,7 @@
const assert = require('assert'); import assert from 'assert';
const errors = require('../errors'); import errors from '../errors';
const { validateResourcePolicy } = require('../policy/policyValidator'); import { validateResourcePolicy } from '../policy/policyValidator';
/** /**
* Format of json policy: * Format of json policy:
@ -49,7 +49,10 @@ const objectActions = [
's3:PutObjectTagging', 's3:PutObjectTagging',
]; ];
class BucketPolicy { export default class BucketPolicy {
_json
_policy
/** /**
* Create a Bucket Policy instance * Create a Bucket Policy instance
* @param {string} json - the json policy * @param {string} json - the json policy
@ -75,8 +78,11 @@ class BucketPolicy {
*/ */
_getPolicy() { _getPolicy() {
if (!this._json || this._json === '') { if (!this._json || this._json === '') {
return { error: errors.MalformedPolicy.customizeDescription( return {
'request json is empty or undefined') }; error: errors.MalformedPolicy.customizeDescription(
'request json is empty or undefined'
),
};
} }
const validSchema = validateResourcePolicy(this._json); const validSchema = validateResourcePolicy(this._json);
if (validSchema.error) { if (validSchema.error) {
@ -104,25 +110,32 @@ class BucketPolicy {
* @return {error} - contains error or empty obj * @return {error} - contains error or empty obj
*/ */
_validateActionResource() { _validateActionResource() {
const invalid = this._policy.Statement.every(s => { const invalid = this._policy.Statement.every((s) => {
const actions = typeof s.Action === 'string' ? const actions =
[s.Action] : s.Action; typeof s.Action === 'string' ? [s.Action] : s.Action;
const resources = typeof s.Resource === 'string' ? const resources =
[s.Resource] : s.Resource; typeof s.Resource === 'string' ? [s.Resource] : s.Resource;
const objectAction = actions.some(a => const objectAction = actions.some(
a.includes('Object') || objectActions.includes(a)); (a) => a.includes('Object') || objectActions.includes(a)
);
// wildcardObjectAction checks for actions such as 's3:*' or // wildcardObjectAction checks for actions such as 's3:*' or
// 's3:Put*' but will return false for actions such as // 's3:Put*' but will return false for actions such as
// 's3:PutBucket*' // 's3:PutBucket*'
const wildcardObjectAction = actions.some( const wildcardObjectAction = actions.some(
a => a.includes('*') && !a.includes('Bucket')); (a) => a.includes('*') && !a.includes('Bucket')
const objectResource = resources.some(r => r.includes('/')); );
return ((objectAction && !objectResource) || const objectResource = resources.some((r) => r.includes('/'));
(objectResource && !objectAction && !wildcardObjectAction)); return (
(objectAction && !objectResource) ||
(objectResource && !objectAction && !wildcardObjectAction)
);
}); });
if (invalid) { if (invalid) {
return { error: errors.MalformedPolicy.customizeDescription( return {
'Action does not apply to any resource(s) in statement') }; error: errors.MalformedPolicy.customizeDescription(
'Action does not apply to any resource(s) in statement'
),
};
} }
return {}; return {};
} }
@ -139,5 +152,3 @@ class BucketPolicy {
assert.deepStrictEqual(validated, { error: null, valid: true }); assert.deepStrictEqual(validated, { error: null, valid: true });
} }
} }
module.exports = BucketPolicy;

View File

@ -1,9 +1,9 @@
const assert = require('assert'); import assert from 'assert';
const UUID = require('uuid'); import UUID from 'uuid';
const errors = require('../errors'); import errors from '../errors';
const LifecycleRule = require('./LifecycleRule'); import LifecycleRule from './LifecycleRule';
const escapeForXml = require('../s3middleware/escapeForXml'); import escapeForXml from '../s3middleware/escapeForXml';
const MAX_DAYS = 2147483647; // Max 32-bit signed binary integer. const MAX_DAYS = 2147483647; // Max 32-bit signed binary integer.
@ -83,7 +83,7 @@ const MAX_DAYS = 2147483647; // Max 32-bit signed binary integer.
}; };
*/ */
class LifecycleConfiguration { export default class LifecycleConfiguration {
/** /**
* Create a Lifecycle Configuration instance * Create a Lifecycle Configuration instance
* @param {string} xml - the parsed xml * @param {string} xml - the parsed xml
@ -1220,5 +1220,3 @@ class LifecycleConfiguration {
return { Rules: rulesJSON }; return { Rules: rulesJSON };
} }
} }
module.exports = LifecycleConfiguration;

View File

@ -1,11 +1,11 @@
const uuid = require('uuid/v4'); import uuid from 'uuid/v4';
/** /**
* @class LifecycleRule * @class LifecycleRule
* *
* @classdesc Simple get/set class to build a single Rule * @classdesc Simple get/set class to build a single Rule
*/ */
class LifecycleRule { export default class LifecycleRule {
constructor(id, status) { constructor(id, status) {
// defaults // defaults
this.id = id || uuid(); this.id = id || uuid();
@ -134,5 +134,3 @@ class LifecycleRule {
return this; return this;
} }
} }
module.exports = LifecycleRule;

View File

@ -1,11 +1,11 @@
const assert = require('assert'); import assert from 'assert';
const UUID = require('uuid'); import UUID from 'uuid';
const { import {
supportedNotificationEvents, supportedNotificationEvents,
notificationArnPrefix, notificationArnPrefix,
} = require('../constants'); } from '../constants';
const errors = require('../errors'); import errors from '../errors';
/** /**
* Format of xml request: * Format of xml request:
@ -51,7 +51,7 @@ const errors = require('../errors');
* } * }
*/ */
class NotificationConfiguration { export default class NotificationConfiguration {
/** /**
* Create a Notification Configuration instance * Create a Notification Configuration instance
* @param {string} xml - parsed configuration xml * @param {string} xml - parsed configuration xml
@ -307,5 +307,3 @@ class NotificationConfiguration {
return; return;
} }
} }
module.exports = NotificationConfiguration;

View File

@ -1,6 +1,5 @@
const assert = require('assert'); import assert from 'assert';
import errors from '../errors';
const errors = require('../errors');
/** /**
* Format of xml request: * Format of xml request:
@ -27,7 +26,7 @@ const errors = require('../errors');
* } * }
* } * }
*/ */
class ObjectLockConfiguration { export default class ObjectLockConfiguration {
/** /**
* Create an Object Lock Configuration instance * Create an Object Lock Configuration instance
* @param {string} xml - the parsed configuration xml * @param {string} xml - the parsed configuration xml
@ -234,5 +233,3 @@ class ObjectLockConfiguration {
'</ObjectLockConfiguration>'; '</ObjectLockConfiguration>';
} }
} }
module.exports = ObjectLockConfiguration;

View File

@ -1,15 +1,15 @@
const crypto = require('crypto'); import * as crypto from 'crypto';
const constants = require('../constants'); import * as constants from '../constants';
const VersionIDUtils = require('../versioning/VersionID'); import * as VersionIDUtils from '../versioning/VersionID';
const ObjectMDLocation = require('./ObjectMDLocation'); import ObjectMDLocation from './ObjectMDLocation';
/** /**
* Class to manage metadata object for regular s3 objects (instead of * Class to manage metadata object for regular s3 objects (instead of
* mpuPart metadata for example) * mpuPart metadata for example)
*/ */
class ObjectMD { export default class ObjectMD {
/** /**
* Create a new instance of ObjectMD. Parameter <tt>objMd</tt> is * Create a new instance of ObjectMD. Parameter <tt>objMd</tt> is
* reserved for internal use, users should call * reserved for internal use, users should call
@ -1189,5 +1189,3 @@ class ObjectMD {
return this._data; return this._data;
} }
} }
module.exports = ObjectMD;

View File

@ -2,7 +2,7 @@
* Helper class to ease access to the Azure specific information for * Helper class to ease access to the Azure specific information for
* Blob and Container objects. * Blob and Container objects.
*/ */
class ObjectMDAzureInfo { export default class ObjectMDAzureInfo {
/** /**
* @constructor * @constructor
* @param {object} obj - Raw structure for the Azure info on Blob/Container * @param {object} obj - Raw structure for the Azure info on Blob/Container
@ -158,5 +158,3 @@ class ObjectMDAzureInfo {
return this._data; return this._data;
} }
} }
module.exports = ObjectMDAzureInfo;

View File

@ -2,7 +2,7 @@
* Helper class to ease access to a single data location in metadata * Helper class to ease access to a single data location in metadata
* 'location' array * 'location' array
*/ */
class ObjectMDLocation { export default class ObjectMDLocation {
/** /**
* @constructor * @constructor
* @param {object} locationObj - single data location info * @param {object} locationObj - single data location info
@ -126,5 +126,3 @@ class ObjectMDLocation {
return this._data; return this._data;
} }
} }
module.exports = ObjectMDLocation;

View File

@ -1,9 +1,9 @@
const assert = require('assert'); import assert from 'assert';
const UUID = require('uuid'); import UUID from 'uuid';
const escapeForXml = require('../s3middleware/escapeForXml'); import escapeForXml from '../s3middleware/escapeForXml';
const errors = require('../errors'); import errors from '../errors';
const { isValidBucketName } = require('../s3routes/routesUtils'); import { isValidBucketName } from '../s3routes/routesUtils';
const MAX_RULES = 1000; const MAX_RULES = 1000;
const RULE_ID_LIMIT = 255; const RULE_ID_LIMIT = 255;
@ -37,7 +37,19 @@ const validStorageClasses = [
</ReplicationConfiguration> </ReplicationConfiguration>
*/ */
class ReplicationConfiguration { export default class ReplicationConfiguration {
_parsedXML
_log
_config
_configPrefixes
_configIDs
_role
_destination
_rules
_prevStorageClass
_hasScalityDestination
_preferredReadLocation
/** /**
* Create a ReplicationConfiguration instance * Create a ReplicationConfiguration instance
* @param {string} xml - The parsed XML * @param {string} xml - The parsed XML
@ -469,5 +481,3 @@ class ReplicationConfiguration {
}); });
} }
} }
module.exports = ReplicationConfiguration;

View File

@ -1,4 +1,7 @@
class RoutingRule { export class RoutingRule {
_redirect;
_condition;
/** /**
* Represents a routing rule in a website configuration. * Represents a routing rule in a website configuration.
* @constructor * @constructor
@ -52,7 +55,12 @@ class RoutingRule {
} }
} }
class WebsiteConfiguration { export class WebsiteConfiguration {
_indexDocument;
_errorDocument;
_redirectAllRequestsTo;
_routingRules;
/** /**
* Object that represents website configuration * Object that represents website configuration
* @constructor * @constructor
@ -188,8 +196,3 @@ class WebsiteConfiguration {
return this._routingRules; return this._routingRules;
} }
} }
module.exports = {
RoutingRule,
WebsiteConfiguration,
};

14
lib/models/index.ts Normal file
View File

@ -0,0 +1,14 @@
export { default as BackendInfo } from './BackendInfo';
export { default as BucketInfo } from './BucketInfo';
export { default as BucketAzureInfo } from './BucketAzureInfo';
export { default as ObjectMD } from './ObjectMD';
export { default as ObjectMDLocation } from './ObjectMDLocation';
export { default as ObjectMDAzureInfo } from './ObjectMDAzureInfo';
export { default as ARN } from './ARN';
export * as WebsiteConfiguration from './WebsiteConfiguration';
export { default as ReplicationConfiguration } from './ReplicationConfiguration';
export { default as LifecycleConfiguration } from './LifecycleConfiguration';
export { default as LifecycleRule } from './LifecycleRule';
export { default as BucketPolicy } from './BucketPolicy';
export { default as ObjectLockConfiguration } from './ObjectLockConfiguration';
export { default as NotificationConfiguration } from './NotificationConfiguration';

View File

@ -17,7 +17,7 @@ function shuffle(array) {
} }
} }
class RoundRobin { export default class RoundRobin {
/** /**
* @constructor * @constructor
* @param {object[]|string[]} hostsList - list of hosts to query * @param {object[]|string[]} hostsList - list of hosts to query
@ -167,5 +167,3 @@ class RoundRobin {
} }
} }
} }
module.exports = RoundRobin;

View File

@ -1,15 +1,13 @@
'use strict'; // eslint-disable-line import * as http from 'http';
import * as https from 'https';
const http = require('http'); import assert from 'assert';
const https = require('https'); import { dhparam } from '../../https/dh2048';
const assert = require('assert'); import { ciphers } from '../../https/ciphers';
const dhparam = require('../../https/dh2048').dhparam; import errors from '../../errors';
const ciphers = require('../../https/ciphers').ciphers; import { checkSupportIPv6 } from './utils';
const errors = require('../../errors');
const { checkSupportIPv6 } = require('./utils');
class Server { export default class Server {
/** /**
* @constructor * @constructor
* *
@ -458,5 +456,3 @@ class Server {
return this; return this;
} }
} }
module.exports = Server;

View File

@ -1,7 +1,5 @@
'use strict'; // eslint-disable-line import * as os from 'os';
import errors from '../../errors';
const os = require('os');
const errors = require('../../errors');
/** /**
* Parse the Range header into an object * Parse the Range header into an object
@ -16,7 +14,7 @@ const errors = require('../../errors');
* - an error attribute of type errors.InvalidArgument if the range * - an error attribute of type errors.InvalidArgument if the range
* syntax is invalid * syntax is invalid
*/ */
function parseRangeSpec(rangeHeader) { export function parseRangeSpec(rangeHeader) {
const rangeMatch = /^bytes=([0-9]+)?-([0-9]+)?$/.exec(rangeHeader); const rangeMatch = /^bytes=([0-9]+)?-([0-9]+)?$/.exec(rangeHeader);
if (rangeMatch) { if (rangeMatch) {
const rangeValues = rangeMatch.slice(1, 3); const rangeValues = rangeMatch.slice(1, 3);
@ -55,7 +53,7 @@ function parseRangeSpec(rangeHeader) {
* - or an 'error' attribute of type errors.InvalidRange if the * - or an 'error' attribute of type errors.InvalidRange if the
* requested range is out of object's boundaries. * requested range is out of object's boundaries.
*/ */
function getByteRangeFromSpec(rangeSpec, objectSize) { export function getByteRangeFromSpec(rangeSpec, objectSize) {
if (rangeSpec.suffix !== undefined) { if (rangeSpec.suffix !== undefined) {
if (rangeSpec.suffix === 0) { if (rangeSpec.suffix === 0) {
// 0-byte suffix is always invalid (even on empty objects) // 0-byte suffix is always invalid (even on empty objects)
@ -95,7 +93,7 @@ function getByteRangeFromSpec(rangeSpec, objectSize) {
* - or an 'error' attribute instead of type errors.InvalidRange if * - or an 'error' attribute instead of type errors.InvalidRange if
* the requested range is out of object's boundaries. * the requested range is out of object's boundaries.
*/ */
function parseRange(rangeHeader, objectSize) { export function parseRange(rangeHeader, objectSize) {
const rangeSpec = parseRangeSpec(rangeHeader); const rangeSpec = parseRangeSpec(rangeHeader);
if (rangeSpec.error) { if (rangeSpec.error) {
// invalid range syntax is silently ignored in HTTP spec, // invalid range syntax is silently ignored in HTTP spec,
@ -105,15 +103,8 @@ function parseRange(rangeHeader, objectSize) {
return getByteRangeFromSpec(rangeSpec, objectSize); return getByteRangeFromSpec(rangeSpec, objectSize);
} }
function checkSupportIPv6() { export function checkSupportIPv6() {
const niList = os.networkInterfaces(); const niList = os.networkInterfaces();
return Object.keys(niList).some(network => return Object.keys(niList).some(network =>
niList[network].some(intfc => intfc.family === 'IPv6')); niList[network].some(intfc => intfc.family === 'IPv6'));
} }
module.exports = {
parseRangeSpec,
getByteRangeFromSpec,
parseRange,
checkSupportIPv6,
};

15
lib/network/index.ts Normal file
View File

@ -0,0 +1,15 @@
import server from './http/server';
export * as rpc from './rpc/rpc';
export * as level from './rpc/level-net';
import RESTServer from './rest/RESTServer';
import RESTClient from './rest/RESTClient';
export { default as RoundRobin } from './RoundRobin';
import * as ProbeServer from './probe/ProbeServer';
import HealthProbeServer from './probe/HealthProbeServer';
import * as Utils from './probe/Utils';
export * as kmip from './kmip';
export { default as kmipClient } from './kmip/Client';
export const http = { server };
export const rest = { RESTServer, RESTClient };
export const probe = { ProbeServer, HealthProbeServer, Utils };

View File

@ -1,12 +1,8 @@
'use strict'; // eslint-disable-line import async from 'async';
/* eslint new-cap: "off" */ import errors from '../../errors';
import TTLVCodec from './codec/ttlv';
const async = require('async'); import TlsTransport from './transport/tls';
import KMIP from '.';
const errors = require('../../errors');
const TTLVCodec = require('./codec/ttlv.js');
const TlsTransport = require('./transport/tls.js');
const KMIP = require('.');
const CRYPTOGRAPHIC_OBJECT_TYPE = 'Symmetric Key'; const CRYPTOGRAPHIC_OBJECT_TYPE = 'Symmetric Key';
const CRYPTOGRAPHIC_ALGORITHM = 'AES'; const CRYPTOGRAPHIC_ALGORITHM = 'AES';
@ -241,8 +237,7 @@ function _queryOperationsAndObjects(client, logger, cb) {
}); });
} }
export default class Client {
class Client {
/** /**
* Construct a high level KMIP driver suitable for cloudserver * Construct a high level KMIP driver suitable for cloudserver
* @param {Object} options - Instance options * @param {Object} options - Instance options
@ -604,5 +599,3 @@ class Client {
}); });
} }
} }
module.exports = Client;

View File

@ -1,6 +1,4 @@
'use strict'; // eslint-disable-line import assert from 'assert';
const assert = require('assert');
function _lookup(decodedTTLV, path) { function _lookup(decodedTTLV, path) {
@ -31,7 +29,7 @@ function _lookup(decodedTTLV, path) {
return res; return res;
} }
class Message { export default class Message {
/** /**
* Construct a new abstract Message * Construct a new abstract Message
* @param {Object} content - the content of the message * @param {Object} content - the content of the message
@ -50,5 +48,3 @@ class Message {
return _lookup(this.content, path); return _lookup(this.content, path);
} }
} }
module.exports = Message;

View File

@ -1,8 +1,5 @@
'use strict'; // eslint-disable-line import KMIPTags from '../tags.json';
/* eslint dot-notation: "off" */ import KMIPMessage from '../Message';
const KMIPTags = require('../tags.json');
const KMIPMessage = require('../Message.js');
const UINT32_MAX = Math.pow(2, 32); const UINT32_MAX = Math.pow(2, 32);
@ -26,7 +23,7 @@ function _throwError(logger, msg, data) {
throw Error(msg); throw Error(msg);
} }
function TTLVCodec() { export default function TTLVCodec() {
if (!new.target) { if (!new.target) {
return new TTLVCodec(); return new TTLVCodec();
} }
@ -430,5 +427,3 @@ function TTLVCodec() {
}; };
return this; return this;
} }
module.exports = TTLVCodec;

View File

@ -1,9 +1,6 @@
'use strict'; // eslint-disable-line import uuidv4 from 'uuid/v4';
/* eslint new-cap: "off" */
const uuidv4 = require('uuid/v4'); import Message from './Message';
const Message = require('./Message.js');
/* This client requires at least a KMIP 1.2 compatible server */ /* This client requires at least a KMIP 1.2 compatible server */
const DEFAULT_PROTOCOL_VERSION_MAJOR = 1; const DEFAULT_PROTOCOL_VERSION_MAJOR = 1;
@ -22,7 +19,7 @@ function _PrimitiveType(tagName, type, value) {
return { [tagName]: { type, value } }; return { [tagName]: { type, value } };
} }
class KMIP { export default class KMIP {
/** /**
* Construct a new KMIP Object * Construct a new KMIP Object
* @param {Class} Codec - * @param {Class} Codec -
@ -343,6 +340,3 @@ class KMIP {
}); });
} }
} }
module.exports = KMIP;

View File

@ -1,11 +1,9 @@
'use strict'; // eslint-disable-line import assert from 'assert';
const assert = require('assert');
const DEFAULT_PIPELINE_DEPTH = 8; const DEFAULT_PIPELINE_DEPTH = 8;
const DEFAULT_KMIP_PORT = 5696; const DEFAULT_KMIP_PORT = 5696;
class TransportTemplate { export default class TransportTemplate {
/** /**
* Construct a new object of the TransportTemplate class * Construct a new object of the TransportTemplate class
* @param {Object} channel - Typically the tls object * @param {Object} channel - Typically the tls object
@ -170,5 +168,3 @@ class TransportTemplate {
conversation.end(); conversation.end();
} }
} }
module.exports = TransportTemplate;

View File

@ -1,12 +0,0 @@
'use strict'; // eslint-disable-line
const tls = require('tls');
const TransportTemplate = require('./TransportTemplate.js');
class TlsTransport extends TransportTemplate {
constructor(options) {
super(tls, options);
}
}
module.exports = TlsTransport;

View File

@ -0,0 +1,8 @@
import * as tls from 'tls';
import TransportTemplate from './TransportTemplate';
export default class TlsTransport extends TransportTemplate {
constructor(options) {
super(tls, options);
}
}

View File

@ -1,14 +1,14 @@
const httpServer = require('../http/server'); import httpServer from '../http/server';
const werelogs = require('werelogs'); import werelogs from 'werelogs';
const errors = require('../../errors'); import errors from '../../errors';
const ZenkoMetrics = require('../../metrics/ZenkoMetrics'); import ZenkoMetrics from '../../metrics/ZenkoMetrics';
const { sendSuccess, sendError } = require('./Utils'); import { sendSuccess, sendError } from './Utils';
function checkStub(log) { // eslint-disable-line function checkStub(log) { // eslint-disable-line
return true; return true;
} }
class HealthProbeServer extends httpServer { export default class HealthProbeServer extends httpServer {
constructor(params) { constructor(params) {
const logging = new werelogs.Logger('HealthProbeServer'); const logging = new werelogs.Logger('HealthProbeServer');
super(params.port, logging); super(params.port, logging);
@ -72,5 +72,3 @@ class HealthProbeServer extends httpServer {
res.end(ZenkoMetrics.asPrometheus()); res.end(ZenkoMetrics.asPrometheus());
} }
} }
module.exports = HealthProbeServer;

View File

@ -1,10 +1,10 @@
const httpServer = require('../http/server'); import httpServer from '../http/server';
const werelogs = require('werelogs'); import werelogs from 'werelogs';
const errors = require('../../errors'); import errors from '../../errors';
const DEFAULT_LIVE_ROUTE = '/_/live'; export const DEFAULT_LIVE_ROUTE = '/_/live';
const DEFAULT_READY_ROUTE = '/_/ready'; export const DEFAULT_READY_ROUTE = '/_/ready';
const DEFAULT_METRICS_ROUTE = '/metrics'; export const DEFAULT_METRICS_ROUTE = '/metrics';
/** /**
* ProbeDelegate is used to handle probe checks. * ProbeDelegate is used to handle probe checks.
@ -28,7 +28,7 @@ const DEFAULT_METRICS_ROUTE = '/metrics';
* *
* @extends {httpServer} * @extends {httpServer}
*/ */
class ProbeServer extends httpServer { export class ProbeServer extends httpServer {
/** /**
* Create a new ProbeServer with parameters * Create a new ProbeServer with parameters
* *
@ -92,10 +92,3 @@ class ProbeServer extends httpServer {
this._handlers.get(req.url)(res, log); this._handlers.get(req.url)(res, log);
} }
} }
module.exports = {
ProbeServer,
DEFAULT_LIVE_ROUTE,
DEFAULT_READY_ROUTE,
DEFAULT_METRICS_ROUTE,
};

View File

@ -5,7 +5,7 @@
* @param {string} [message] - Message to send as response, defaults to OK * @param {string} [message] - Message to send as response, defaults to OK
* @returns {undefined} * @returns {undefined}
*/ */
function sendSuccess(res, log, message = 'OK') { export function sendSuccess(res, log, message = 'OK') {
log.debug('replying with success'); log.debug('replying with success');
res.writeHead(200); res.writeHead(200);
res.end(message); res.end(message);
@ -19,7 +19,7 @@ function sendSuccess(res, log, message = 'OK') {
* @param {string} [optMessage] - Message to use instead of the errors message * @param {string} [optMessage] - Message to use instead of the errors message
* @returns {undefined} * @returns {undefined}
*/ */
function sendError(res, log, error, optMessage) { export function sendError(res, log, error, optMessage) {
const message = optMessage || error.description || ''; const message = optMessage || error.description || '';
log.debug('sending back error response', log.debug('sending back error response',
{ {
@ -34,8 +34,3 @@ function sendError(res, log, error, optMessage) {
errorMessage: message, errorMessage: message,
})); }));
} }
module.exports = {
sendSuccess,
sendError,
};

View File

@ -1,12 +1,10 @@
'use strict'; // eslint-disable-line import assert from 'assert';
import http from 'http';
import werelogs from 'werelogs';
const assert = require('assert'); import * as constants from '../../constants';
const http = require('http'); import * as utils from './utils';
const werelogs = require('werelogs'); import errors from '../../errors';
const constants = require('../../constants');
const utils = require('./utils');
const errors = require('../../errors');
const HttpAgent = require('agentkeepalive'); const HttpAgent = require('agentkeepalive');
@ -64,7 +62,7 @@ function makeErrorFromHTTPResponse(response) {
* *
* The API is usable when the object is constructed. * The API is usable when the object is constructed.
*/ */
class RESTClient { export default class RESTClient {
/** /**
* Interface to the data file server * Interface to the data file server
* @constructor * @constructor
@ -311,5 +309,3 @@ class RESTClient {
* @callback RESTClient~deleteCallback * @callback RESTClient~deleteCallback
* @param {Error} - The encountered error * @param {Error} - The encountered error
*/ */
module.exports = RESTClient;

View File

@ -1,15 +1,13 @@
'use strict'; // eslint-disable-line import assert from 'assert';
import url from 'url';
const assert = require('assert'); import werelogs from 'werelogs';
const url = require('url');
const werelogs = require('werelogs'); import httpServer from '../http/server';
import * as constants from '../../constants';
const httpServer = require('../http/server'); import { parseURL } from './utils';
const constants = require('../../constants'); import * as httpUtils from '../http/utils';
const { parseURL } = require('./utils'); import errors from '../../errors';
const httpUtils = require('../http/utils');
const errors = require('../../errors');
function setContentLength(response, contentLength) { function setContentLength(response, contentLength) {
response.setHeader('Content-Length', contentLength.toString()); response.setHeader('Content-Length', contentLength.toString());
@ -44,7 +42,7 @@ function sendError(res, log, error, optMessage) {
* You have to call setup() to initialize the storage backend, then * You have to call setup() to initialize the storage backend, then
* start() to start listening to the configured port. * start() to start listening to the configured port.
*/ */
class RESTServer extends httpServer { export default class RESTServer extends httpServer {
/** /**
* @constructor * @constructor
* @param {Object} params - constructor params * @param {Object} params - constructor params
@ -273,5 +271,3 @@ class RESTServer extends httpServer {
return undefined; return undefined;
} }
} }
module.exports = RESTServer;

View File

@ -1,12 +1,10 @@
'use strict'; // eslint-disable-line import errors from '../../errors';
import * as constants from '../../constants';
const errors = require('../../errors'); import * as url from 'url';
const constants = require('../../constants');
const url = require('url');
const passthroughPrefixLength = constants.passthroughFileURL.length; const passthroughPrefixLength = constants.passthroughFileURL.length;
function explodePath(path) { export function explodePath(path) {
if (path.startsWith(constants.passthroughFileURL)) { if (path.startsWith(constants.passthroughFileURL)) {
const key = path.slice(passthroughPrefixLength + 1); const key = path.slice(passthroughPrefixLength + 1);
return { return {
@ -37,7 +35,7 @@ function explodePath(path) {
* - pathInfo.service {String} - The name of REST service ("DataFile") * - pathInfo.service {String} - The name of REST service ("DataFile")
* - pathInfo.key {String} - The requested key * - pathInfo.key {String} - The requested key
*/ */
function parseURL(urlStr, expectKey) { export function parseURL(urlStr, expectKey) {
const urlObj = url.parse(urlStr); const urlObj = url.parse(urlStr);
const pathInfo = explodePath(decodeURI(urlObj.path)); const pathInfo = explodePath(decodeURI(urlObj.path));
if ((pathInfo.service !== constants.dataFileURL) if ((pathInfo.service !== constants.dataFileURL)
@ -61,8 +59,3 @@ function parseURL(urlStr, expectKey) {
} }
return pathInfo; return pathInfo;
} }
module.exports = {
explodePath,
parseURL,
};

View File

@ -1,8 +1,6 @@
'use strict'; // eslint-disable-line import assert from 'assert';
const assert = require('assert'); import * as rpc from './rpc';
const rpc = require('./rpc.js');
/** /**
* @class * @class
@ -16,7 +14,7 @@ const rpc = require('./rpc.js');
* openSub() can be used to open sub-levels, returning a new LevelDB * openSub() can be used to open sub-levels, returning a new LevelDB
* RPC client object accessing the sub-level transparently. * RPC client object accessing the sub-level transparently.
*/ */
class LevelDbClient extends rpc.BaseClient { export class LevelDbClient extends rpc.BaseClient {
/** /**
* @constructor * @constructor
* *
@ -76,7 +74,7 @@ class LevelDbClient extends rpc.BaseClient {
* database is opened and attached to the call environment in * database is opened and attached to the call environment in
* env.subDb (env is passed as first parameter of received RPC calls). * env.subDb (env is passed as first parameter of received RPC calls).
*/ */
class LevelDbService extends rpc.BaseService { export class LevelDbService extends rpc.BaseService {
/** /**
* @constructor * @constructor
* *
@ -123,8 +121,3 @@ class LevelDbService extends rpc.BaseService {
return subDb; return subDb;
} }
} }
module.exports = {
LevelDbClient,
LevelDbService,
};

View File

@ -1,17 +1,14 @@
'use strict'; // eslint-disable-line import http from 'http';
import io from 'socket.io';
import ioClient from 'socket.io-client';
import * as sioStream from './sio-stream';
import async from 'async';
import assert from 'assert';
import { EventEmitter } from 'events';
const http = require('http'); import { flattenError, reconstructError } from './utils';
const io = require('socket.io'); import errors from '../../errors';
const ioClient = require('socket.io-client'); import * as jsutil from '../../jsutil';
const sioStream = require('./sio-stream');
const async = require('async');
const assert = require('assert');
const EventEmitter = require('events').EventEmitter;
const flattenError = require('./utils').flattenError;
const reconstructError = require('./utils').reconstructError;
const errors = require('../../errors');
const jsutil = require('../../jsutil');
const DEFAULT_CALL_TIMEOUT_MS = 30000; const DEFAULT_CALL_TIMEOUT_MS = 30000;
@ -36,7 +33,7 @@ let streamRPCJSONObj;
* - the return value is passed as callback's second argument (unless * - the return value is passed as callback's second argument (unless
* an error occurred). * an error occurred).
*/ */
class BaseClient extends EventEmitter { export class BaseClient extends EventEmitter {
/** /**
* @constructor * @constructor
* *
@ -249,7 +246,7 @@ class BaseClient extends EventEmitter {
* method. * method.
* *
*/ */
class BaseService { export class BaseService {
/** /**
* @constructor * @constructor
* *
@ -467,7 +464,8 @@ class BaseService {
* @return {Object} a server object, not yet listening on a TCP port * @return {Object} a server object, not yet listening on a TCP port
* (you must call listen(port) on the returned object) * (you must call listen(port) on the returned object)
*/ */
function RPCServer(params) {
export function RPCServer(params) {
assert(params.logger); assert(params.logger);
const httpServer = http.createServer(); const httpServer = http.createServer();
@ -666,7 +664,7 @@ streamRPCJSONObj = function _streamRPCJSONObj(obj, wstream, cb) {
* @return {Object} a HTTP server object, not yet listening on a TCP * @return {Object} a HTTP server object, not yet listening on a TCP
* port (you must call listen(port) on the returned object) * port (you must call listen(port) on the returned object)
*/ */
function RESTServer(params) { export function RESTServer(params) {
assert(params); assert(params);
assert(params.logger); assert(params.logger);
const httpServer = http.createServer((req, res) => { const httpServer = http.createServer((req, res) => {
@ -737,11 +735,3 @@ function RESTServer(params) {
return httpServer; return httpServer;
} }
module.exports = {
BaseClient,
BaseService,
RPCServer,
RESTServer,
};

Some files were not shown because too many files have changed in this diff Show More