Compare commits

...

11 Commits

Author SHA1 Message Date
Guillaume Hivert d50ba76793 ARSN-67 Upload Artifacts 2022-03-04 12:13:57 +01:00
Guillaume Hivert 19c83f9d2e ARSN-67 Switch index.ts to import/export and fix JSON import in policyValidator 2022-03-04 11:49:12 +01:00
Guillaume Hivert fc35b118e3 ARSN-67 Rename index.js to index.ts for proper future migration 2022-03-04 11:30:18 +01:00
Guillaume Hivert d42a808abe ARSN-67 Remove ignore of build for NPM
Installing from git sources in dependent projects produced only an index.js
file. This was because .gitignore ignores the build folder, and npm/yarn
remove ignored files after install, falling back to .gitignore rules when no
.npmignore exists. Adding an empty .npmignore therefore
solves the problem. This is documented here:
https://stackoverflow.com/questions/61754026/installing-npm-package-with-prepare-script-from-yarn-produces-only-index-js
2022-03-04 11:29:32 +01:00
Guillaume Hivert c37035cc9e ARSN-67 Add TypeScript and Babel, and make test suite working 2022-03-04 11:27:07 +01:00
Guillaume Hivert c87627f28a ARSN-84 Correct Jest configuration for test suites and coverage
Thanks to the file renaming, we can stick as closely as possible to the Jest
default configuration. The extra options are gone; we now specify only
maxWorkers (because the test suite is linear and breaks when run in
parallel) and the files to collect coverage from.
The coverage script itself is merged into one command instead of three,
to leverage Jest's built-in coverage (a minimal sketch of such a
configuration follows this entry).
2022-03-04 10:02:16 +01:00
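A minimal sketch of what such a near-default configuration can look like; the file name and values here are illustrative assumptions, not the repository's exact config:
```
// jest.config.js -- illustrative sketch, not the repo's exact file.
module.exports = {
    // The test suite is linear and breaks when run in parallel.
    maxWorkers: 1,
    // Collect coverage only from the library sources.
    collectCoverageFrom: ['lib/**/*.js'],
};
```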
Guillaume Hivert 21326b0ea1 ARSN-84 Rename all test files from [name].js to [name].spec.js
In order to simplify the Jest configuration, we have to rename the files to
follow the Jest convention (a .spec.js extension for test
files).
2022-03-04 10:02:16 +01:00
Guillaume Hivert 1ad20826cd ARSN-84 Fix Jest bug in _arsenalError
You can check out the bug at
https://github.com/facebook/jest/issues/2549.
The bug is inherent to Jest and has been known for years: Jest swaps
Node's VM for its own custom VM and injects its own set of globals. The
Error provided by Jest is therefore different from the Error provided by
Node, and the test `err instanceof Error` is false (a sketch of the
duck-typing workaround, applied in _arsenalError further down this diff,
follows this entry).
Error:
```
 Expected value to be equal to:
      true
 Received:
      false
```
2022-03-04 10:02:16 +01:00
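A small sketch of the duck-typing workaround this commit applies in `_arsenalError` (visible in the kmip/Client diff below); the helper name here is hypothetical:
```
// Under Jest's VM, the injected global Error differs from Node's, so
// `err instanceof Error` can be false for genuine errors. Duck-typing
// the message property works in both environments.
function isErrorLike(err) {
    return err instanceof Error ||
        (err && typeof err.message === 'string');
}

console.log(isErrorLike(new Error('boom'))); // true under Node and Jest
```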
Guillaume Hivert aeac58560e ARSN-84 Fix redis commands in functional tests
The switch from mocha to Jest introduced some test bugs.
As far as we can tell, Jest is quicker than mocha, which creates some
weird behaviour: some commands sent to redis (with ioredis)
work, and some don't. Our conclusion is that redis needs
to queue requests offline to ride out micro-disconnections in
development. Otherwise, we get the following error:
```
  - StatsModel class › should correctly record a new request by default
one increment

    assert.ifError(received, expected)

    Expected value ifError to:
      null
    Received:
      [Error: Stream isn't writeable and enableOfflineQueue options is
false]

    Message:
      ifError got unwanted exception: Stream isn't writeable and
enableOfflineQueue options is false
```
Switching enableOfflineQueue to true makes the test suite succeed
(see the ioredis sketch after this entry).
2022-03-04 10:02:16 +01:00
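A minimal ioredis sketch of the fix, assuming a local redis instance; enableOfflineQueue is a real ioredis constructor option:
```
const Redis = require('ioredis');

// With enableOfflineQueue set to true, commands issued during a
// micro-disconnection are queued and replayed on reconnect instead of
// failing with "Stream isn't writeable".
const redis = new Redis({
    host: 'localhost', // placeholder
    port: 6379,        // placeholder
    enableOfflineQueue: true,
});

redis.incr('example:counter').then(count => console.log(count));
```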
Guillaume Hivert 940fa18b72 ARSN-84 Fix linting with correct indentation and trailing commas 2022-03-04 10:02:16 +01:00
Guillaume Hivert f0a2fbb47c ARSN-84 Introduce Jest and reconfigure ESLint
Add Jest as a test runner to replace mocha, so that TS compiles
on the fly and mixed TS/JS sources are allowed (replacing
mocha's before and after with Jest's beforeAll and afterAll; a sketch
of that hook change follows this entry), and add some ESLint
configuration to keep ESLint happy.
2022-03-03 18:44:17 +01:00
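A minimal sketch of the mocha-to-Jest hook change; setup()/teardown() are hypothetical stand-ins for whatever a given suite did in its before()/after() hooks:
```
// Hypothetical no-op hooks standing in for real suite setup/teardown.
const setup = () => {};
const teardown = () => {};

describe('example suite', () => {
    beforeAll(setup);   // was mocha's before()
    afterAll(teardown); // was mocha's after()
    test('still passes', () => {
        expect(true).toBe(true);
    });
});
```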
166 changed files with 5554 additions and 2799 deletions

View File

@ -31,7 +31,8 @@ jobs:
node-version: '16'
cache: 'yarn'
- name: install dependencies
run: yarn install --frozen-lockfile
run: yarn cache clean && yarn install --frozen-lockfile
continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
- name: lint yaml
run: yarn --silent lint_yml
- name: lint javascript
@ -45,3 +46,31 @@ jobs:
- name: run executables tests
run: yarn install && yarn test
working-directory: 'lib/executables/pensieveCreds/'
compile:
name: Compile and upload build artifacts
needs: test
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Install NodeJS
uses: actions/setup-node@v2
with:
node-version: '16'
cache: yarn
- name: Install dependencies
run: yarn cache clean && yarn install --frozen-lockfile
continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
- name: Compile
run: yarn build
continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
- name: Upload artifacts
uses: scality/action-artifacts@v2
with:
url: https://artifacts.scality.net
user: ${{ secrets.ARTIFACTS_USER }}
password: ${{ secrets.ARTIFACTS_PASSWORD }}
source: ./build
method: upload
if: success()

6
.gitignore vendored
View File

@ -10,3 +10,9 @@ node_modules/
*-linux
*-macos
# Coverage
coverage/
.nyc_output/
# TypeScript
build/

0
.npmignore Normal file
View File

6
babel.config.js Normal file
View File

@ -0,0 +1,6 @@
module.exports = {
presets: [
['@babel/preset-env', { targets: { node: 'current' } }],
'@babel/preset-typescript',
],
};

147
index.js
View File

@ -1,147 +0,0 @@
module.exports = {
auth: require('./lib/auth/auth'),
constants: require('./lib/constants'),
db: require('./lib/db'),
errors: require('./lib/errors.js'),
shuffle: require('./lib/shuffle'),
stringHash: require('./lib/stringHash'),
ipCheck: require('./lib/ipCheck'),
jsutil: require('./lib/jsutil'),
https: {
ciphers: require('./lib/https/ciphers.js'),
dhparam: require('./lib/https/dh2048.js'),
},
algorithms: {
list: {
Basic: require('./lib/algos/list/basic').List,
Delimiter: require('./lib/algos/list/delimiter').Delimiter,
DelimiterVersions: require('./lib/algos/list/delimiterVersions')
.DelimiterVersions,
DelimiterMaster: require('./lib/algos/list/delimiterMaster')
.DelimiterMaster,
MPU: require('./lib/algos/list/MPU').MultipartUploads,
},
listTools: {
DelimiterTools: require('./lib/algos/list/tools'),
},
cache: {
LRUCache: require('./lib/algos/cache/LRUCache'),
},
stream: {
MergeStream: require('./lib/algos/stream/MergeStream'),
},
SortedSet: require('./lib/algos/set/SortedSet'),
},
policies: {
evaluators: require('./lib/policyEvaluator/evaluator.js'),
validateUserPolicy: require('./lib/policy/policyValidator')
.validateUserPolicy,
evaluatePrincipal: require('./lib/policyEvaluator/principal'),
RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
requestUtils: require('./lib/policyEvaluator/requestUtils'),
actionMaps: require('./lib/policyEvaluator/utils/actionMaps'),
},
Clustering: require('./lib/Clustering'),
testing: {
matrix: require('./lib/testing/matrix.js'),
},
versioning: {
VersioningConstants: require('./lib/versioning/constants.js')
.VersioningConstants,
Version: require('./lib/versioning/Version.js').Version,
VersionID: require('./lib/versioning/VersionID.js'),
WriteGatheringManager: require('./lib/versioning/WriteGatheringManager.js'),
WriteCache: require('./lib/versioning/WriteCache.js'),
VersioningRequestProcessor: require('./lib/versioning/VersioningRequestProcessor.js'),
},
network: {
http: {
server: require('./lib/network/http/server'),
},
rpc: require('./lib/network/rpc/rpc'),
level: require('./lib/network/rpc/level-net'),
rest: {
RESTServer: require('./lib/network/rest/RESTServer'),
RESTClient: require('./lib/network/rest/RESTClient'),
},
probe: {
ProbeServer: require('./lib/network/probe/ProbeServer'),
},
RoundRobin: require('./lib/network/RoundRobin'),
kmip: require('./lib/network/kmip'),
kmipClient: require('./lib/network/kmip/Client'),
},
s3routes: {
routes: require('./lib/s3routes/routes'),
routesUtils: require('./lib/s3routes/routesUtils'),
},
s3middleware: {
userMetadata: require('./lib/s3middleware/userMetadata'),
convertToXml: require('./lib/s3middleware/convertToXml'),
escapeForXml: require('./lib/s3middleware/escapeForXml'),
objectLegalHold: require('./lib/s3middleware/objectLegalHold'),
tagging: require('./lib/s3middleware/tagging'),
validateConditionalHeaders:
require('./lib/s3middleware/validateConditionalHeaders')
.validateConditionalHeaders,
MD5Sum: require('./lib/s3middleware/MD5Sum'),
NullStream: require('./lib/s3middleware/nullStream'),
objectUtils: require('./lib/s3middleware/objectUtils'),
azureHelper: {
mpuUtils:
require('./lib/s3middleware/azureHelpers/mpuUtils'),
ResultsCollector:
require('./lib/s3middleware/azureHelpers/ResultsCollector'),
SubStreamInterface:
require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
},
retention: require('./lib/s3middleware/objectRetention'),
lifecycleHelpers: require('./lib/s3middleware/lifecycleHelpers'),
},
storage: {
metadata: {
MetadataFileServer:
require('./lib/storage/metadata/file/MetadataFileServer'),
MetadataFileClient:
require('./lib/storage/metadata/file/MetadataFileClient'),
LogConsumer:
require('./lib/storage/metadata/bucketclient/LogConsumer'),
},
data: {
file: {
DataFileStore:
require('./lib/storage/data/file/DataFileStore'),
},
},
utils: require('./lib/storage/utils'),
},
models: {
BucketInfo: require('./lib/models/BucketInfo'),
ObjectMD: require('./lib/models/ObjectMD'),
ObjectMDLocation: require('./lib/models/ObjectMDLocation'),
ARN: require('./lib/models/ARN'),
WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
ReplicationConfiguration:
require('./lib/models/ReplicationConfiguration'),
LifecycleConfiguration:
require('./lib/models/LifecycleConfiguration'),
LifecycleRule: require('./lib/models/LifecycleRule'),
BucketPolicy: require('./lib/models/BucketPolicy'),
ObjectLockConfiguration:
require('./lib/models/ObjectLockConfiguration'),
NotificationConfiguration:
require('./lib/models/NotificationConfiguration'),
},
metrics: {
StatsClient: require('./lib/metrics/StatsClient'),
StatsModel: require('./lib/metrics/StatsModel'),
RedisClient: require('./lib/metrics/RedisClient'),
ZenkoMetrics: require('./lib/metrics/ZenkoMetrics'),
},
pensieve: {
credentialUtils: require('./lib/executables/pensieveCreds/utils'),
},
stream: {
readJSONStreamObject: require('./lib/stream/readJSONStreamObject'),
},
};

142
index.ts Normal file
View File

@ -0,0 +1,142 @@
export const auth = require('./lib/auth/auth');
export const constants = require('./lib/constants');
export const db = require('./lib/db');
export const errors = require('./lib/errors.js');
export const shuffle = require('./lib/shuffle');
export const stringHash = require('./lib/stringHash');
export const ipCheck = require('./lib/ipCheck');
export const jsutil = require('./lib/jsutil');
export const Clustering = require('./lib/Clustering');
export const https = {
ciphers: require('./lib/https/ciphers.js'),
dhparam: require('./lib/https/dh2048.js'),
};
export const algorithms = {
list: {
Basic: require('./lib/algos/list/basic').List,
Delimiter: require('./lib/algos/list/delimiter').Delimiter,
DelimiterVersions: require('./lib/algos/list/delimiterVersions').DelimiterVersions,
DelimiterMaster: require('./lib/algos/list/delimiterMaster').DelimiterMaster,
MPU: require('./lib/algos/list/MPU').MultipartUploads,
},
listTools: {
DelimiterTools: require('./lib/algos/list/tools'),
},
cache: {
LRUCache: require('./lib/algos/cache/LRUCache'),
},
stream: {
MergeStream: require('./lib/algos/stream/MergeStream'),
},
SortedSet: require('./lib/algos/set/SortedSet'),
};
export const policies = {
evaluators: require('./lib/policyEvaluator/evaluator.js'),
validateUserPolicy: require('./lib/policy/policyValidator').validateUserPolicy,
evaluatePrincipal: require('./lib/policyEvaluator/principal'),
RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
requestUtils: require('./lib/policyEvaluator/requestUtils'),
actionMaps: require('./lib/policyEvaluator/utils/actionMaps'),
};
export const testing = {
matrix: require('./lib/testing/matrix.js'),
};
export const versioning = {
VersioningConstants: require('./lib/versioning/constants.js').VersioningConstants,
Version: require('./lib/versioning/Version.js').Version,
VersionID: require('./lib/versioning/VersionID.js'),
WriteGatheringManager: require('./lib/versioning/WriteGatheringManager.js'),
WriteCache: require('./lib/versioning/WriteCache.js'),
VersioningRequestProcessor: require('./lib/versioning/VersioningRequestProcessor.js'),
};
export const network = {
http: {
server: require('./lib/network/http/server'),
},
rpc: require('./lib/network/rpc/rpc'),
level: require('./lib/network/rpc/level-net'),
rest: {
RESTServer: require('./lib/network/rest/RESTServer'),
RESTClient: require('./lib/network/rest/RESTClient'),
},
probe: {
ProbeServer: require('./lib/network/probe/ProbeServer'),
},
RoundRobin: require('./lib/network/RoundRobin'),
kmip: require('./lib/network/kmip'),
kmipClient: require('./lib/network/kmip/Client'),
};
export const s3routes = {
routes: require('./lib/s3routes/routes'),
routesUtils: require('./lib/s3routes/routesUtils'),
};
export const s3middleware = {
userMetadata: require('./lib/s3middleware/userMetadata'),
convertToXml: require('./lib/s3middleware/convertToXml'),
escapeForXml: require('./lib/s3middleware/escapeForXml'),
objectLegalHold: require('./lib/s3middleware/objectLegalHold'),
tagging: require('./lib/s3middleware/tagging'),
validateConditionalHeaders:
require('./lib/s3middleware/validateConditionalHeaders').validateConditionalHeaders,
MD5Sum: require('./lib/s3middleware/MD5Sum'),
NullStream: require('./lib/s3middleware/nullStream'),
objectUtils: require('./lib/s3middleware/objectUtils'),
azureHelper: {
mpuUtils: require('./lib/s3middleware/azureHelpers/mpuUtils'),
ResultsCollector: require('./lib/s3middleware/azureHelpers/ResultsCollector'),
SubStreamInterface: require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
},
retention: require('./lib/s3middleware/objectRetention'),
lifecycleHelpers: require('./lib/s3middleware/lifecycleHelpers'),
};
export const storage = {
metadata: {
MetadataFileServer: require('./lib/storage/metadata/file/MetadataFileServer'),
MetadataFileClient: require('./lib/storage/metadata/file/MetadataFileClient'),
LogConsumer: require('./lib/storage/metadata/bucketclient/LogConsumer'),
},
data: {
file: {
DataFileStore: require('./lib/storage/data/file/DataFileStore'),
},
},
utils: require('./lib/storage/utils'),
};
export const models = {
BucketInfo: require('./lib/models/BucketInfo'),
ObjectMD: require('./lib/models/ObjectMD'),
ObjectMDLocation: require('./lib/models/ObjectMDLocation'),
ARN: require('./lib/models/ARN'),
WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
ReplicationConfiguration: require('./lib/models/ReplicationConfiguration'),
LifecycleConfiguration: require('./lib/models/LifecycleConfiguration'),
LifecycleRule: require('./lib/models/LifecycleRule'),
BucketPolicy: require('./lib/models/BucketPolicy'),
ObjectLockConfiguration: require('./lib/models/ObjectLockConfiguration'),
NotificationConfiguration: require('./lib/models/NotificationConfiguration'),
};
export const metrics = {
StatsClient: require('./lib/metrics/StatsClient'),
StatsModel: require('./lib/metrics/StatsModel'),
RedisClient: require('./lib/metrics/RedisClient'),
ZenkoMetrics: require('./lib/metrics/ZenkoMetrics'),
};
export const pensieve = {
credentialUtils: require('./lib/executables/pensieveCreds/utils'),
};
export const stream = {
readJSONStreamObject: require('./lib/stream/readJSONStreamObject'),
};
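Because every export keeps its previous name, consumers should see no API change. A minimal sketch, assuming the package is consumed under its npm name `arsenal`:
```
// Both access styles keep working against the new index.ts.
const { errors, versioning } = require('arsenal');
// From TypeScript/ESM consumers:
// import { errors, versioning } from 'arsenal';

console.log(versioning.VersioningConstants);
```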

View File

@ -1,7 +1,7 @@
'use strict'; // eslint-disable-line strict
const { inc, checkLimit, listingParamsMasterKeysV0ToV1,
FILTER_END, FILTER_ACCEPT } = require('./tools');
FILTER_END, FILTER_ACCEPT } = require('./tools');
const DEFAULT_MAX_KEYS = 1000;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

View File

@ -2,7 +2,7 @@
const Extension = require('./Extension').default;
const { inc, listingParamsMasterKeysV0ToV1,
FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
const VSConst = require('../../versioning/constants').VersioningConstants;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
@ -66,7 +66,7 @@ class Delimiter extends Extension {
this.continuationToken = parameters.continuationToken;
this.alphabeticalOrder =
typeof parameters.alphabeticalOrder !== 'undefined' ?
parameters.alphabeticalOrder : true;
parameters.alphabeticalOrder : true;
this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
// results
@ -87,7 +87,7 @@ class Delimiter extends Extension {
this[this.nextContinueMarker].startsWith(this.prefix || '')) {
const nextDelimiterIndex =
this[this.nextContinueMarker].indexOf(this.delimiter,
this.prefix ? this.prefix.length : 0);
this.prefix ? this.prefix.length : 0);
this[this.nextContinueMarker] =
this[this.nextContinueMarker].slice(0, nextDelimiterIndex +
this.delimiter.length);

View File

@ -194,7 +194,7 @@ class DelimiterVersions extends Delimiter {
// length is the same so we can remove their prefix without
// looking at the type of key
return this.filterCommon(obj.key.slice(DbPrefixes.Master.length),
obj.value);
obj.value);
}
filterCommon(key, value) {
@ -250,7 +250,7 @@ class DelimiterVersions extends Delimiter {
}
// skip to the same object key in both M and V range listings
return [DbPrefixes.Master + skipV0,
DbPrefixes.Version + skipV0];
DbPrefixes.Version + skipV0];
}
/**

View File

@ -1,7 +1,6 @@
const ArrayUtils = require('./ArrayUtils');
class SortedSet {
constructor(obj) {
if (obj) {
this.keys = obj.keys;

View File

@ -91,7 +91,7 @@ class Vault {
requestContext: serializedRCsArr,
},
(err, userInfo) => vaultSignatureCb(err, userInfo,
params.log, callback)
params.log, callback),
);
}
@ -146,7 +146,7 @@ class Vault {
requestContext: serializedRCs,
},
(err, userInfo) => vaultSignatureCb(err, userInfo,
params.log, callback, streamingV4Params)
params.log, callback, streamingV4Params),
);
}
@ -232,28 +232,28 @@ class Vault {
*/
getAccountIds(canonicalIDs, log, callback) {
log.trace('getting accountIds from Vault based on canonicalIDs',
{ canonicalIDs });
{ canonicalIDs });
this.client.getAccountIds(canonicalIDs,
{ reqUid: log.getSerializedUids() },
(err, info) => {
if (err) {
log.debug('received error message from vault',
{ errorMessage: err });
return callback(err);
}
const infoFromVault = info.message.body;
log.trace('info received from vault', { infoFromVault });
const result = {};
/* If the accountId was not found in Vault, do not
send the canonicalID back to the API */
Object.keys(infoFromVault).forEach(key => {
if (infoFromVault[key] !== 'NotFound' &&
infoFromVault[key] !== 'WrongFormat') {
result[key] = infoFromVault[key];
{ reqUid: log.getSerializedUids() },
(err, info) => {
if (err) {
log.debug('received error message from vault',
{ errorMessage: err });
return callback(err);
}
const infoFromVault = info.message.body;
log.trace('info received from vault', { infoFromVault });
const result = {};
/* If the accountId was not found in Vault, do not
send the canonicalID back to the API */
Object.keys(infoFromVault).forEach(key => {
if (infoFromVault[key] !== 'NotFound' &&
infoFromVault[key] !== 'WrongFormat') {
result[key] = infoFromVault[key];
}
});
return callback(null, result);
});
return callback(null, result);
});
}
/** checkPolicies -- call Vault to evaluate policies

View File

@ -72,7 +72,7 @@ function extractParams(request, log, awsService, data) {
version = 'v4';
} else {
log.trace('invalid authorization security header',
{ header: authHeader });
{ header: authHeader });
return { err: errors.AccessDenied };
}
} else if (data.Signature) {
@ -87,7 +87,7 @@ function extractParams(request, log, awsService, data) {
if (version !== null && method !== null) {
if (!checkFunctions[version] || !checkFunctions[version][method]) {
log.trace('invalid auth version or method',
{ version, authMethod: method });
{ version, authMethod: method });
return { err: errors.NotImplemented };
}
log.trace('identified auth method', { version, authMethod: method });
@ -159,7 +159,7 @@ function doAuth(request, log, cb, awsService, requestContexts) {
* @return {undefined}
*/
function generateV4Headers(request, data, accessKey, secretKeyValue,
awsService, proxyPath, sessionToken) {
awsService, proxyPath, sessionToken) {
Object.assign(request, { headers: {} });
const amzDate = convertUTCtoISO8601(Date.now());
// get date without time
@ -192,16 +192,16 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
.filter(headerName =>
headerName.startsWith('x-amz-')
|| headerName.startsWith('x-scal-')
|| headerName === 'host'
|| headerName === 'host',
).sort().join(';');
const params = { request, signedHeaders, payloadChecksum,
credentialScope, timestamp, query: data,
awsService: service, proxyPath };
const stringToSign = constructStringToSignV4(params);
const signingKey = vaultUtilities.calculateSigningKey(secretKeyValue,
region,
scopeDate,
service);
region,
scopeDate,
service);
const signature = crypto.createHmac('sha256', signingKey)
.update(stringToSign, 'binary').digest('hex');
const authorizationHeader = `${algorithm} Credential=${accessKey}` +

View File

@ -26,20 +26,20 @@ class AuthLoader {
.required();
const accountsJoi = joi.array()
.items({
name: joi.string().required(),
email: joi.string().email().required(),
arn: joi.string().required(),
canonicalID: joi.string().required(),
shortid: joi.string().regex(/^[0-9]{12}$/).required(),
keys: this._joiKeysValidator,
// backward-compat
users: joi.array(),
})
.required()
.unique('arn')
.unique('email')
.unique('canonicalID');
.items({
name: joi.string().required(),
email: joi.string().email().required(),
arn: joi.string().required(),
canonicalID: joi.string().required(),
shortid: joi.string().regex(/^[0-9]{12}$/).required(),
keys: this._joiKeysValidator,
// backward-compat
users: joi.array(),
})
.required()
.unique('arn')
.unique('email')
.unique('canonicalID');
this._joiValidator = joi.object({ accounts: accountsJoi });
}
@ -136,7 +136,7 @@ class AuthLoader {
_validateData(authData, filePath) {
const res = joi.validate(authData, this._joiValidator,
{ abortEarly: false });
{ abortEarly: false });
if (res.error) {
this._dumpJoiErrors(res.error.details, filePath);
return false;
@ -156,7 +156,7 @@ class AuthLoader {
'master/conf/authdata.json). Also note that support ' +
'for account users has been dropped.',
{ accountName: account.name, accountArn: account.arn,
filePath });
filePath });
arnError = true;
return;
}
@ -167,7 +167,7 @@ class AuthLoader {
'https://github.com/scality/S3/blob/master/conf/' +
'authdata.json)',
{ accountName: account.name, accountArn: account.arn,
filePath });
filePath });
arnError = true;
return;
}
@ -176,8 +176,8 @@ class AuthLoader {
this._log.error(
'authentication config validation error',
{ reason: arnObj.error.description,
accountName: account.name, accountArn: account.arn,
filePath });
accountName: account.name, accountArn: account.arn,
filePath });
arnError = true;
return;
}
@ -185,8 +185,8 @@ class AuthLoader {
this._log.error(
'authentication config validation error',
{ reason: 'not an IAM account ARN',
accountName: account.name, accountArn: account.arn,
filePath });
accountName: account.name, accountArn: account.arn,
filePath });
arnError = true;
return;
}
@ -215,7 +215,7 @@ class AuthLoader {
logInfo.context = err.context;
}
this._log.error('authentication config validation error',
logInfo);
logInfo);
});
}
}

View File

@ -41,7 +41,7 @@ function getCanonicalizedAmzHeaders(headers, clientType) {
// Build headerString
return amzHeaders.reduce((headerStr, current) =>
`${headerStr}${current[0]}:${current[1]}\n`,
'');
'');
}
module.exports = getCanonicalizedAmzHeaders;

View File

@ -22,9 +22,9 @@ function check(request, log, data) {
timestamp = Date.parse(timestamp);
if (!timestamp) {
log.debug('missing or invalid date header',
{ method: 'auth/v2/headerAuthCheck.check' });
{ method: 'auth/v2/headerAuthCheck.check' });
return { err: errors.AccessDenied.
customizeDescription('Authentication requires a valid Date or ' +
customizeDescription('Authentication requires a valid Date or ' +
'x-amz-date header') };
}

View File

@ -42,12 +42,12 @@ function check(request, log, data) {
if (expirationTime > currentTime + preSignedURLExpiry) {
log.debug('expires parameter too far in future',
{ expires: request.query.Expires });
{ expires: request.query.Expires });
return { err: errors.AccessDenied };
}
if (currentTime > expirationTime) {
log.debug('current time exceeds expires time',
{ expires: request.query.Expires });
{ expires: request.query.Expires });
return { err: errors.RequestTimeTooSkewed };
}
const accessKey = data.AWSAccessKeyId;

View File

@ -88,14 +88,14 @@ function check(request, log, data, awsService) {
}
if (!timestamp) {
log.debug('missing or invalid date header',
{ method: 'auth/v4/headerAuthCheck.check' });
{ method: 'auth/v4/headerAuthCheck.check' });
return { err: errors.AccessDenied.
customizeDescription('Authentication requires a valid Date or ' +
customizeDescription('Authentication requires a valid Date or ' +
'x-amz-date header') };
}
const validationResult = validateCredentials(credentialsArr, timestamp,
log);
log);
if (validationResult instanceof Error) {
log.debug('credentials in improper format', { credentialsArr,
timestamp, validationResult });

View File

@ -45,7 +45,7 @@ function check(request, log, data) {
}
const validationResult = validateCredentials(credential, timestamp,
log);
log);
if (validationResult instanceof Error) {
log.debug('credentials in improper format', { credential,
timestamp, validationResult });

View File

@ -25,20 +25,20 @@ function validateCredentials(credentials, timestamp, log) {
log.warn('accessKey provided is wrong format', { accessKey });
return errors.InvalidArgument;
}
// The scope date (format YYYYMMDD) must be same date as the timestamp
// on the request from the x-amz-date param (if queryAuthCheck)
// or from the x-amz-date header or date header (if headerAuthCheck)
// Format of timestamp is ISO 8601: YYYYMMDDTHHMMSSZ.
// http://docs.aws.amazon.com/AmazonS3/latest/API/
// sigv4-query-string-auth.html
// http://docs.aws.amazon.com/general/latest/gr/
// sigv4-date-handling.html
// The scope date (format YYYYMMDD) must be same date as the timestamp
// on the request from the x-amz-date param (if queryAuthCheck)
// or from the x-amz-date header or date header (if headerAuthCheck)
// Format of timestamp is ISO 8601: YYYYMMDDTHHMMSSZ.
// http://docs.aws.amazon.com/AmazonS3/latest/API/
// sigv4-query-string-auth.html
// http://docs.aws.amazon.com/general/latest/gr/
// sigv4-date-handling.html
// convert timestamp to format of scopeDate YYYYMMDD
// convert timestamp to format of scopeDate YYYYMMDD
const timestampDate = timestamp.split('T')[0];
if (scopeDate.length !== 8 || scopeDate !== timestampDate) {
log.warn('scope date must be the same date as the timestamp date',
{ scopeDate, timestampDate });
{ scopeDate, timestampDate });
return errors.RequestTimeTooSkewed;
}
if (service !== 's3' && service !== 'iam' && service !== 'ring' &&
@ -50,7 +50,7 @@ function validateCredentials(credentials, timestamp, log) {
}
if (requestType !== 'aws4_request') {
log.warn('requestType contained in params is not aws4_request',
{ requestType });
{ requestType });
return errors.InvalidArgument;
}
return {};
@ -68,7 +68,7 @@ function extractQueryParams(queryObj, log) {
// Do not need the algorithm sent back
if (queryObj['X-Amz-Algorithm'] !== 'AWS4-HMAC-SHA256') {
log.warn('algorithm param incorrect',
{ algo: queryObj['X-Amz-Algorithm'] });
{ algo: queryObj['X-Amz-Algorithm'] });
return authParams;
}

View File

@ -64,12 +64,12 @@ class IndexTransaction {
push(op) {
if (this.closed) {
throw propError('pushOnCommittedTransaction',
'can not add ops to already committed transaction');
'can not add ops to already committed transaction');
}
if (op.type !== 'put' && op.type !== 'del') {
throw propError('invalidTransactionVerb',
`unknown action type: ${op.type}`);
`unknown action type: ${op.type}`);
}
if (op.key === undefined) {
@ -137,7 +137,7 @@ class IndexTransaction {
addCondition(condition) {
if (this.closed) {
throw propError('pushOnCommittedTransaction',
'can not add conditions to already committed transaction');
'can not add conditions to already committed transaction');
}
if (condition === undefined || Object.keys(condition).length === 0) {
throw propError('missingCondition', 'missing condition for conditional put');
@ -159,12 +159,12 @@ class IndexTransaction {
commit(cb) {
if (this.closed) {
return cb(propError('alreadyCommitted',
'transaction was already committed'));
'transaction was already committed'));
}
if (this.operations.length === 0) {
return cb(propError('emptyTransaction',
'tried to commit an empty transaction'));
'tried to commit an empty transaction'));
}
this.closed = true;

View File

@ -76,11 +76,11 @@ function errorsGen() {
const errorsObj = require('../errors/arsenalErrors.json');
Object.keys(errorsObj)
.filter(index => index !== '_comment')
.forEach(index => {
errors[index] = new ArsenalError(index, errorsObj[index].code,
errorsObj[index].description);
});
.filter(index => index !== '_comment')
.forEach(index => {
errors[index] = new ArsenalError(index, errorsObj[index].code,
errorsObj[index].description);
});
return errors;
}

View File

@ -17,9 +17,9 @@ describe('decyrptSecret', () => {
describe('parseServiceCredentials', () => {
const conf = {
users: [{ accessKey,
accountType: 'service-clueso',
secretKey,
userName: 'Search Service Account' }],
accountType: 'service-clueso',
secretKey,
userName: 'Search Service Account' }],
};
const auth = JSON.stringify({ privateKey });

View File

@ -25,7 +25,7 @@ module.exports.once = function once(func) {
state.res = func.apply(func, args);
} else {
debug('function already called:', func,
'returning cached result:', state.res);
'returning cached result:', state.res);
}
return state.res;
};

View File

@ -17,7 +17,7 @@ class RedisClient {
method: 'RedisClient.constructor',
redisHost: config.host,
redisPort: config.port,
})
}),
);
return this;
}

View File

@ -1,5 +1,5 @@
const StatsClient = require('./StatsClient');
/**
/**
* @class StatsModel
*
* @classdesc Extend and overwrite how timestamps are normalized by minutes

View File

@ -2,8 +2,8 @@ const promClient = require('prom-client');
const collectDefaultMetricsIntervalMs =
process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS !== undefined ?
Number.parseInt(process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS, 10) :
10000;
Number.parseInt(process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS, 10) :
10000;
promClient.collectDefaultMetrics({ timeout: collectDefaultMetricsIntervalMs });

View File

@ -27,7 +27,7 @@ class ARN {
static createFromString(arnStr) {
const [arn, partition, service, region, accountId,
resourceType, resource] = arnStr.split(':');
resourceType, resource] = arnStr.split(':');
if (arn !== 'arn') {
return { error: errors.InvalidArgument.customizeDescription(
@ -58,7 +58,7 @@ class ARN {
'must be a 12-digit number or "*"') };
}
const fullResource = (resource !== undefined ?
`${resourceType}:${resource}` : resourceType);
`${resourceType}:${resource}` : resourceType);
return new ARN(partition, service, region, accountId, fullResource);
}
@ -98,7 +98,7 @@ class ARN {
toString() {
return ['arn', this.getPartition(), this.getService(),
this.getRegion(), this.getAccountId(), this.getResource()]
this.getRegion(), this.getAccountId(), this.getResource()]
.join(':');
}
}

View File

@ -62,12 +62,12 @@ class BucketInfo {
* @param {object} [notificationConfiguration] - bucket notification configuration
*/
constructor(name, owner, ownerDisplayName, creationDate,
mdBucketModelVersion, acl, transient, deleted,
serverSideEncryption, versioningConfiguration,
locationConstraint, websiteConfiguration, cors,
replicationConfiguration, lifecycleConfiguration,
bucketPolicy, uid, objectLockEnabled, objectLockConfiguration,
notificationConfiguration) {
mdBucketModelVersion, acl, transient, deleted,
serverSideEncryption, versioningConfiguration,
locationConstraint, websiteConfiguration, cors,
replicationConfiguration, lifecycleConfiguration,
bucketPolicy, uid, objectLockEnabled,
objectLockConfiguration, notificationConfiguration) {
assert.strictEqual(typeof name, 'string');
assert.strictEqual(typeof owner, 'string');
assert.strictEqual(typeof ownerDisplayName, 'string');
@ -86,7 +86,7 @@ class BucketInfo {
if (serverSideEncryption) {
assert.strictEqual(typeof serverSideEncryption, 'object');
const { cryptoScheme, algorithm, masterKeyId,
configuredMasterKeyId, mandatory } = serverSideEncryption;
configuredMasterKeyId, mandatory } = serverSideEncryption;
assert.strictEqual(typeof cryptoScheme, 'number');
assert.strictEqual(typeof algorithm, 'string');
assert.strictEqual(typeof masterKeyId, 'string');

View File

@ -375,7 +375,7 @@ class LifecycleConfiguration {
if (!tags[i].Key || !tags[i].Value) {
tagObj.error =
errors.MissingRequiredParameter.customizeDescription(
'Tag XML does not contain both Key and Value');
'Tag XML does not contain both Key and Value');
break;
}
@ -611,7 +611,7 @@ class LifecycleConfiguration {
const daysInt = parseInt(subExp.Days[0], 10);
if (daysInt < 1) {
expObj.error = errors.InvalidArgument.customizeDescription(
'Expiration days is not a positive integer');
'Expiration days is not a positive integer');
} else {
expObj.days = daysInt;
}

View File

@ -27,7 +27,7 @@ const errors = require('../errors');
* </NotificationConfiguration>
*/
/**
/**
* Format of config:
*
* config = {

View File

@ -17,7 +17,7 @@ const errors = require('../errors');
* </ObjectLockConfiguration>
*/
/**
/**
* Format of config:
*
* config = {

View File

@ -8,7 +8,6 @@ const ObjectMDLocation = require('./ObjectMDLocation');
* mpuPart metadata for example)
*/
class ObjectMD {
/**
* Create a new instance of ObjectMD. Parameter <tt>objMd</tt> is
* reserved for internal use, users should call
@ -138,7 +137,7 @@ class ObjectMD {
Object.assign(this._data, objMd._data);
Object.assign(this._data.replicationInfo,
objMd._data.replicationInfo);
objMd._data.replicationInfo);
}
_updateFromParsedJSON(objMd) {

View File

@ -3,7 +3,6 @@
* 'location' array
*/
class ObjectMDLocation {
/**
* @constructor
* @param {object} locationObj - single data location info

View File

@ -111,7 +111,7 @@ class RoundRobin {
pickHost() {
if (this.logger) {
this.logger.debug('pick host',
{ host: this.getCurrentHost() });
{ host: this.getCurrentHost() });
}
const curHost = this.getCurrentHost();
++this.pickCount;
@ -163,7 +163,7 @@ class RoundRobin {
}
if (this.logger) {
this.logger.debug('round robin host',
{ newHost: this.getCurrentHost() });
{ newHost: this.getCurrentHost() });
}
}
}

View File

@ -10,7 +10,6 @@ const { checkSupportIPv6 } = require('./utils');
class Server {
/**
* @constructor
*
@ -429,16 +428,16 @@ class Server {
// Setting no delay of the socket to the value configured
sock.setNoDelay(this.isNoDelay());
sock.on('error', err => this._logger.info(
'socket error - request rejected', { error: err }));
'socket error - request rejected', { error: err }));
});
this._server.on('tlsClientError', (err, sock) =>
this._onClientError(err, sock));
this._onClientError(err, sock));
this._server.on('clientError', (err, sock) =>
this._onClientError(err, sock));
this._onClientError(err, sock));
this._server.on('checkContinue', (req, res) =>
this._onCheckContinue(req, res));
this._onCheckContinue(req, res));
this._server.on('checkExpectation', (req, res) =>
this._onCheckExpectation(req, res));
this._onCheckExpectation(req, res));
this._server.on('listening', () => this._onListening());
}
this._server.listen(this._port, this._address);

View File

@ -72,8 +72,8 @@ function getByteRangeFromSpec(rangeSpec, objectSize) {
if (rangeSpec.start < objectSize) {
// test is false if end is undefined
return { range: [rangeSpec.start,
(rangeSpec.end < objectSize ?
rangeSpec.end : objectSize - 1)] };
(rangeSpec.end < objectSize ?
rangeSpec.end : objectSize - 1)] };
}
return { error: errors.InvalidRange };
}

View File

@ -55,7 +55,12 @@ function _arsenalError(err) {
if (typeof err === 'string') {
return errors.InternalError
.customizeDescription(`${messagePrefix} ${err}`);
} else if (err instanceof Error) {
} else if (
err instanceof Error ||
// INFO: The second part is here only for Jest, to remove when we'll be
// fully migrated to TS
(err && typeof err.message === 'string')
) {
return errors.InternalError
.customizeDescription(`${messagePrefix} ${err.message}`);
}
@ -90,8 +95,8 @@ function _negotiateProtocolVersion(client, logger, cb) {
if (err) {
const error = _arsenalError(err);
logger.error('KMIP::negotiateProtocolVersion',
{ error,
vendorIdentification: client.vendorIdentification });
{ error,
vendorIdentification: client.vendorIdentification });
return cb(error);
}
const majorVersions =
@ -102,8 +107,8 @@ function _negotiateProtocolVersion(client, logger, cb) {
majorVersions.length !== minorVersions.length) {
const error = _arsenalError('No suitable protocol version');
logger.error('KMIP::negotiateProtocolVersion',
{ error,
vendorIdentification: client.vendorIdentification });
{ error,
vendorIdentification: client.vendorIdentification });
return cb(error);
}
client.kmip.changeProtocolVersion(majorVersions[0], minorVersions[0]);
@ -126,8 +131,8 @@ function _mapExtensions(client, logger, cb) {
if (err) {
const error = _arsenalError(err);
logger.error('KMIP::mapExtensions',
{ error,
vendorIdentification: client.vendorIdentification });
{ error,
vendorIdentification: client.vendorIdentification });
return cb(error);
}
const extensionNames = response.lookup(searchFilter.extensionName);
@ -135,8 +140,8 @@ function _mapExtensions(client, logger, cb) {
if (extensionNames.length !== extensionTags.length) {
const error = _arsenalError('Inconsistent extension list');
logger.error('KMIP::mapExtensions',
{ error,
vendorIdentification: client.vendorIdentification });
{ error,
vendorIdentification: client.vendorIdentification });
return cb(error);
}
extensionNames.forEach((extensionName, idx) => {
@ -160,7 +165,7 @@ function _queryServerInformation(client, logger, cb) {
if (err) {
const error = _arsenalError(err);
logger.warn('KMIP::queryServerInformation',
{ error });
{ error });
/* no error returned, caller can keep going */
return cb();
}
@ -170,9 +175,9 @@ function _queryServerInformation(client, logger, cb) {
JSON.stringify(response.lookup(searchFilter.serverInformation)[0]));
logger.info('KMIP Server identified',
{ vendorIdentification: client.vendorIdentification,
serverInformation: client.serverInformation,
negotiatedProtocolVersion: client.kmip.protocolVersion });
{ vendorIdentification: client.vendorIdentification,
serverInformation: client.serverInformation,
negotiatedProtocolVersion: client.kmip.protocolVersion });
return cb();
});
}
@ -196,8 +201,8 @@ function _queryOperationsAndObjects(client, logger, cb) {
if (err) {
const error = _arsenalError(err);
logger.error('KMIP::queryOperationsAndObjects',
{ error,
vendorIdentification: client.vendorIdentification });
{ error,
vendorIdentification: client.vendorIdentification });
return cb(error);
}
const supportedOperations = response.lookup(searchFilter.operation);
@ -222,15 +227,15 @@ function _queryOperationsAndObjects(client, logger, cb) {
logger.warn('KMIP::queryOperationsAndObjects: ' +
'The KMIP Server announces that it ' +
'does not support all of the required features',
{ vendorIdentification: client.vendorIdentification,
serverInformation: client.serverInformation,
supportsEncrypt, supportsDecrypt,
supportsActivate, supportsRevoke,
supportsCreate, supportsDestroy,
supportsQuery, supportsSymmetricKeys });
{ vendorIdentification: client.vendorIdentification,
serverInformation: client.serverInformation,
supportsEncrypt, supportsDecrypt,
supportsActivate, supportsRevoke,
supportsCreate, supportsDestroy,
supportsQuery, supportsSymmetricKeys });
} else {
logger.info('KMIP Server provides the necessary feature set',
{ vendorIdentification: client.vendorIdentification });
{ vendorIdentification: client.vendorIdentification });
}
return cb();
});
@ -264,8 +269,8 @@ class Client {
this.vendorIdentification = '';
this.serverInformation = [];
this.kmip = new KMIP(CodecClass || TTLVCodec,
TransportClass || TlsTransport,
options);
TransportClass || TlsTransport,
options);
this.kmip.registerHandshakeFunction((logger, cb) => {
this._kmipHandshake(logger, cb);
});
@ -322,8 +327,8 @@ class Client {
if (err) {
const error = _arsenalError(err);
logger.error('KMIP::_activateBucketKey',
{ error,
serverInformation: this.serverInformation });
{ error,
serverInformation: this.serverInformation });
return cb(error);
}
const uniqueIdentifier =
@ -332,7 +337,7 @@ class Client {
const error = _arsenalError(
'Server did not return the expected identifier');
logger.error('KMIP::cipherDataKey',
{ error, uniqueIdentifier });
{ error, uniqueIdentifier });
return cb(error);
}
return cb(null, keyIdentifier);
@ -351,20 +356,20 @@ class Client {
const attributes = [];
if (!!this.options.bucketNameAttributeName) {
attributes.push(KMIP.Attribute('TextString',
this.options.bucketNameAttributeName,
bucketName));
this.options.bucketNameAttributeName,
bucketName));
}
attributes.push(...[
KMIP.Attribute('Enumeration', 'Cryptographic Algorithm',
CRYPTOGRAPHIC_ALGORITHM),
CRYPTOGRAPHIC_ALGORITHM),
KMIP.Attribute('Integer', 'Cryptographic Length',
CRYPTOGRAPHIC_LENGTH),
CRYPTOGRAPHIC_LENGTH),
KMIP.Attribute('Integer', 'Cryptographic Usage Mask',
this.kmip.encodeMask('Cryptographic Usage Mask',
CRYPTOGRAPHIC_USAGE_MASK))]);
this.kmip.encodeMask('Cryptographic Usage Mask',
CRYPTOGRAPHIC_USAGE_MASK))]);
if (this.options.compoundCreateActivate) {
attributes.push(KMIP.Attribute('Date-Time', 'Activation Date',
new Date(Date.UTC())));
new Date(Date.UTC())));
}
return this.kmip.request(logger, 'Create', [
@ -374,8 +379,8 @@ class Client {
if (err) {
const error = _arsenalError(err);
logger.error('KMIP::createBucketKey',
{ error,
serverInformation: this.serverInformation });
{ error,
serverInformation: this.serverInformation });
return cb(error);
}
const createdObjectType =
@ -386,7 +391,7 @@ class Client {
const error = _arsenalError(
'Server created an object of wrong type');
logger.error('KMIP::createBucketKey',
{ error, createdObjectType });
{ error, createdObjectType });
return cb(error);
}
if (!this.options.compoundCreateActivate) {
@ -411,16 +416,16 @@ class Client {
KMIP.TextString('Unique Identifier', bucketKeyId),
KMIP.Structure('Revocation Reason', [
KMIP.Enumeration('Revocation Reason Code',
'Cessation of Operation'),
'Cessation of Operation'),
KMIP.TextString('Revocation Message',
'About to be deleted'),
'About to be deleted'),
]),
], (err, response) => {
if (err) {
const error = _arsenalError(err);
logger.error('KMIP::_revokeBucketKey',
{ error,
serverInformation: this.serverInformation });
{ error,
serverInformation: this.serverInformation });
return cb(error);
}
const uniqueIdentifier =
@ -429,7 +434,7 @@ class Client {
const error = _arsenalError(
'Server did not return the expected identifier');
logger.error('KMIP::_revokeBucketKey',
{ error, uniqueIdentifier });
{ error, uniqueIdentifier });
return cb(error);
}
return cb();
@ -448,8 +453,8 @@ class Client {
if (err) {
const error = _arsenalError(err);
logger.error('KMIP::destroyBucketKey: revocation failed',
{ error,
serverInformation: this.serverInformation });
{ error,
serverInformation: this.serverInformation });
return cb(error);
}
return this.kmip.request(logger, 'Destroy', [
@ -458,8 +463,8 @@ class Client {
if (err) {
const error = _arsenalError(err);
logger.error('KMIP::destroyBucketKey',
{ error,
serverInformation: this.serverInformation });
{ error,
serverInformation: this.serverInformation });
return cb(error);
}
const uniqueIdentifier =
@ -468,7 +473,7 @@ class Client {
const error = _arsenalError(
'Server did not return the expected identifier');
logger.error('KMIP::destroyBucketKey',
{ error, uniqueIdentifier });
{ error, uniqueIdentifier });
return cb(error);
}
return cb();
@ -487,19 +492,19 @@ class Client {
* @callback called with (err, cipheredDataKey: Buffer)
*/
cipherDataKey(cryptoScheme,
masterKeyId,
plainTextDataKey,
logger,
cb) {
masterKeyId,
plainTextDataKey,
logger,
cb) {
return this.kmip.request(logger, 'Encrypt', [
KMIP.TextString('Unique Identifier', masterKeyId),
KMIP.Structure('Cryptographic Parameters', [
KMIP.Enumeration('Block Cipher Mode',
CRYPTOGRAPHIC_CIPHER_MODE),
CRYPTOGRAPHIC_CIPHER_MODE),
KMIP.Enumeration('Padding Method',
CRYPTOGRAPHIC_PADDING_METHOD),
CRYPTOGRAPHIC_PADDING_METHOD),
KMIP.Enumeration('Cryptographic Algorithm',
CRYPTOGRAPHIC_ALGORITHM),
CRYPTOGRAPHIC_ALGORITHM),
]),
KMIP.ByteString('Data', plainTextDataKey),
KMIP.ByteString('IV/Counter/Nonce', CRYPTOGRAPHIC_DEFAULT_IV),
@ -507,8 +512,8 @@ class Client {
if (err) {
const error = _arsenalError(err);
logger.error('KMIP::cipherDataKey',
{ error,
serverInformation: this.serverInformation });
{ error,
serverInformation: this.serverInformation });
return cb(error);
}
const uniqueIdentifier =
@ -518,7 +523,7 @@ class Client {
const error = _arsenalError(
'Server did not return the expected identifier');
logger.error('KMIP::cipherDataKey',
{ error, uniqueIdentifier });
{ error, uniqueIdentifier });
return cb(error);
}
return cb(null, data);
@ -536,19 +541,19 @@ class Client {
* @callback called with (err, plainTextDataKey: Buffer)
*/
decipherDataKey(cryptoScheme,
masterKeyId,
cipheredDataKey,
logger,
cb) {
masterKeyId,
cipheredDataKey,
logger,
cb) {
return this.kmip.request(logger, 'Decrypt', [
KMIP.TextString('Unique Identifier', masterKeyId),
KMIP.Structure('Cryptographic Parameters', [
KMIP.Enumeration('Block Cipher Mode',
CRYPTOGRAPHIC_CIPHER_MODE),
CRYPTOGRAPHIC_CIPHER_MODE),
KMIP.Enumeration('Padding Method',
CRYPTOGRAPHIC_PADDING_METHOD),
CRYPTOGRAPHIC_PADDING_METHOD),
KMIP.Enumeration('Cryptographic Algorithm',
CRYPTOGRAPHIC_ALGORITHM),
CRYPTOGRAPHIC_ALGORITHM),
]),
KMIP.ByteString('Data', cipheredDataKey),
KMIP.ByteString('IV/Counter/Nonce', CRYPTOGRAPHIC_DEFAULT_IV),
@ -556,8 +561,8 @@ class Client {
if (err) {
const error = _arsenalError(err);
logger.error('KMIP::decipherDataKey',
{ error,
serverInformation: this.serverInformation });
{ error,
serverInformation: this.serverInformation });
return cb(error);
}
const uniqueIdentifier =
@ -567,7 +572,7 @@ class Client {
const error = _arsenalError(
'Server did not return the right identifier');
logger.error('KMIP::decipherDataKey',
{ error, uniqueIdentifier });
{ error, uniqueIdentifier });
return cb(error);
}
return cb(null, data);

View File

@ -55,15 +55,15 @@ function TTLVCodec() {
const property = {};
if (!TypeDecoder[elementType]) {
_throwError(logger,
'Unknown element type',
{ funcName, elementTag, elementType });
'Unknown element type',
{ funcName, elementTag, elementType });
}
const elementValue = value.slice(i + 8,
i + 8 + elementLength);
i + 8 + elementLength);
if (elementValue.length !== elementLength) {
_throwError(logger, 'BUG: Wrong buffer size',
{ funcName, elementLength,
bufferLength: elementValue.length });
{ funcName, elementLength,
bufferLength: elementValue.length });
}
property.type = TypeDecoder[elementType].name;
property.value = TypeDecoder[elementType]
@ -75,7 +75,7 @@ function TTLVCodec() {
const tagInfo = TagDecoder[elementTag];
if (!tagInfo) {
logger.debug('Unknown element tag',
{ funcName, elementTag });
{ funcName, elementTag });
property.tag = elementTag;
element['Unknown Tag'] = property;
} else {
@ -83,8 +83,8 @@ function TTLVCodec() {
if (tagInfo.name === 'Attribute Name') {
if (property.type !== 'TextString') {
_throwError(logger,
'Invalide type',
{ funcName, type: property.type });
'Invalide type',
{ funcName, type: property.type });
}
diversion = property.value;
}
@ -114,8 +114,8 @@ function TTLVCodec() {
}
const itemResult =
TypeEncoder[itemType].encode(itemTagName,
itemValue,
itemDiversion);
itemValue,
itemDiversion);
encodedValue = encodedValue
.concat(_ttlvPadVector(itemResult));
});
@ -133,9 +133,9 @@ function TTLVCodec() {
const fixedLength = 4;
if (fixedLength !== value.length) {
_throwError(logger,
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
}
return value.readUInt32BE(0);
},
@ -156,16 +156,16 @@ function TTLVCodec() {
const fixedLength = 8;
if (fixedLength !== value.length) {
_throwError(logger,
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
}
const longUInt = UINT32_MAX * value.readUInt32BE(0) +
value.readUInt32BE(4);
if (longUInt > Number.MAX_SAFE_INTEGER) {
_throwError(logger,
'53-bit overflow',
{ funcName, longUInt });
'53-bit overflow',
{ funcName, longUInt });
}
return longUInt;
},
@ -200,9 +200,9 @@ function TTLVCodec() {
const fixedLength = 4;
if (fixedLength !== value.length) {
_throwError(logger,
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
}
const enumValue = value.toString('hex');
const actualTag = diversion ? TagEncoder[diversion].value : tag;
@ -211,10 +211,10 @@ function TTLVCodec() {
!enumInfo.enumeration ||
!enumInfo.enumeration[enumValue]) {
return { tag,
value: enumValue,
message: 'Unknown enumeration value',
diversion,
};
value: enumValue,
message: 'Unknown enumeration value',
diversion,
};
}
return enumInfo.enumeration[enumValue];
},
@ -227,7 +227,7 @@ function TTLVCodec() {
const actualTag = diversion || tagName;
const encodedValue =
Buffer.from(TagEncoder[actualTag].enumeration[value],
'hex');
'hex');
return _ttlvPadVector([tag, type, length, encodedValue]);
},
},
@ -238,9 +238,9 @@ function TTLVCodec() {
const fixedLength = 8;
if (fixedLength !== value.length) {
_throwError(logger,
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
}
const msUInt = value.readUInt32BE(0);
const lsUInt = value.readUInt32BE(4);
@ -267,7 +267,7 @@ function TTLVCodec() {
const length = Buffer.alloc(4);
length.writeUInt32BE(value.length);
return _ttlvPadVector([tag, type, length,
Buffer.from(value, 'utf8')]);
Buffer.from(value, 'utf8')]);
},
},
'08': {
@ -289,17 +289,17 @@ function TTLVCodec() {
const fixedLength = 8;
if (fixedLength !== value.length) {
_throwError(logger,
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
}
const d = new Date(0);
const utcSeconds = UINT32_MAX * value.readUInt32BE(0) +
value.readUInt32BE(4);
if (utcSeconds > Number.MAX_SAFE_INTEGER) {
_throwError(logger,
'53-bit overflow',
{ funcName, utcSeconds });
'53-bit overflow',
{ funcName, utcSeconds });
}
d.setUTCSeconds(utcSeconds);
return d;
@ -323,9 +323,9 @@ function TTLVCodec() {
const fixedLength = 4;
if (fixedLength !== value.length) {
_throwError(logger,
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
}
return value.readInt32BE(0);
},
@ -415,8 +415,8 @@ function TTLVCodec() {
throw Error(`Unknown Type '${type}'`);
}
const itemValue = TypeEncoder[type].encode(key,
item[key].value,
item[key].diversion);
item[key].value,
item[key].diversion);
result = result.concat(_ttlvPadVector(itemValue));
});
});

View File

@ -275,11 +275,11 @@ class KMIP {
KMIP.Structure('Request Header', [
KMIP.Structure('Protocol Version', [
KMIP.Integer('Protocol Version Major',
this.protocolVersion.major),
this.protocolVersion.major),
KMIP.Integer('Protocol Version Minor',
this.protocolVersion.minor)]),
this.protocolVersion.minor)]),
KMIP.Integer('Maximum Response Size',
this.maximumResponseSize),
this.maximumResponseSize),
KMIP.Integer('Batch Count', 1)]),
KMIP.Structure('Batch Item', [
KMIP.Enumeration('Operation', operation),
@ -292,7 +292,7 @@ class KMIP {
(err, conversation, rawResponse) => {
if (err) {
logger.error('KMIP::request: Failed to send message',
{ error: err });
{ error: err });
return cb(err);
}
const response = this._decodeMessage(logger, rawResponse);
@ -311,16 +311,16 @@ class KMIP {
this.transport.abortPipeline(conversation);
const error = Error('Invalid batch item ID returned');
logger.error('KMIP::request: failed',
{ resultUniqueBatchItemID, uuid, error });
{ resultUniqueBatchItemID, uuid, error });
return cb(error);
}
if (performedOperation !== operation) {
this.transport.abortPipeline(conversation);
const error = Error('Operation mismatch',
{ got: performedOperation,
expected: operation });
{ got: performedOperation,
expected: operation });
logger.error('KMIP::request: Operation mismatch',
{ error });
{ error });
return cb(error);
}
if (resultStatus !== 'Success') {
@ -331,19 +331,17 @@ class KMIP {
response.lookup(
'Response Message/Batch Item/Result Message')[0];
const error = Error('KMIP request failure',
{ resultStatus,
resultReason,
resultMessage });
{ resultStatus,
resultReason,
resultMessage });
logger.error('KMIP::request: request failed',
{ error, resultStatus,
resultReason, resultMessage });
{ error, resultStatus,
resultReason, resultMessage });
return cb(error);
}
return cb(null, response);
});
}
}

View File

@ -86,8 +86,8 @@ class TransportTemplate {
const deferedRequest = this.deferedRequests.shift();
process.nextTick(() => {
this.send(logger,
deferedRequest.encodedMessage,
deferedRequest.cb);
deferedRequest.encodedMessage,
deferedRequest.cb);
});
} else if (this.callbackPipeline.length === 0 &&
this.deferedRequests.length === 0 &&

View File

@ -19,7 +19,7 @@ function setContentRange(response, byteRange, objectSize) {
const [start, end] = byteRange;
assert(start !== undefined && end !== undefined);
response.setHeader('Content-Range',
`bytes ${start}-${end}/${objectSize}`);
`bytes ${start}-${end}/${objectSize}`);
}
function sendError(res, log, error, optMessage) {
@ -81,7 +81,6 @@ function parseURL(urlStr, expectKey) {
* start() to start listening to the configured port.
*/
class RESTServer extends httpServer {
/**
* @constructor
* @param {Object} params - constructor params
@ -263,7 +262,7 @@ class RESTServer extends httpServer {
return sendError(res, log, err);
}
log.debug('sending back 200/206 response with contents',
{ key: pathInfo.key });
{ key: pathInfo.key });
setContentLength(res, contentLength);
res.setHeader('Accept-Ranges', 'bytes');
if (byteRange) {
@ -301,7 +300,7 @@ class RESTServer extends httpServer {
return sendError(res, log, err);
}
log.debug('sending back 204 response to DELETE',
{ key: pathInfo.key });
{ key: pathInfo.key });
res.writeHead(204);
return res.end(() => {
log.debug('DELETE response sent', { key: pathInfo.key });

View File

@ -8,7 +8,7 @@ module.exports.explodePath = function explodePath(path) {
return {
service: pathMatch[1],
key: (pathMatch[3] !== undefined && pathMatch[3].length > 0 ?
pathMatch[3] : undefined),
pathMatch[3] : undefined),
};
}
throw errors.InvalidURI.customizeDescription('malformed URI');

View File

@ -17,7 +17,6 @@ const rpc = require('./rpc.js');
* RPC client object accessing the sub-level transparently.
*/
class LevelDbClient extends rpc.BaseClient {
/**
* @constructor
*
@ -78,7 +77,6 @@ class LevelDbClient extends rpc.BaseClient {
* env.subDb (env is passed as first parameter of received RPC calls).
*/
class LevelDbService extends rpc.BaseService {
/**
* @constructor
*

View File

@ -37,7 +37,6 @@ let streamRPCJSONObj;
* an error occurred).
*/
class BaseClient extends EventEmitter {
/**
* @constructor
*
@ -54,7 +53,7 @@ class BaseClient extends EventEmitter {
*/
constructor(params) {
const { url, logger, callTimeoutMs,
streamMaxPendingAck, streamAckTimeoutMs } = params;
streamMaxPendingAck, streamAckTimeoutMs } = params;
assert(url);
assert(logger);
@ -82,11 +81,11 @@ class BaseClient extends EventEmitter {
_call(remoteCall, args, cb) {
const wrapCb = (err, data) => {
cb(reconstructError(err),
this.socketStreams.decodeStreams(data));
this.socketStreams.decodeStreams(data));
};
this.logger.debug('remote call', { remoteCall, args });
this.socket.emit('call', remoteCall,
this.socketStreams.encodeStreams(args), wrapCb);
this.socketStreams.encodeStreams(args), wrapCb);
return undefined;
}
@ -113,8 +112,8 @@ class BaseClient extends EventEmitter {
throw new Error(`argument cb=${cb} is not a callback`);
}
async.timeout(this._call.bind(this), timeoutMs,
`operation ${remoteCall} timed out`)(remoteCall,
args, cb);
`operation ${remoteCall} timed out`)(remoteCall,
args, cb);
return undefined;
}
@ -142,7 +141,7 @@ class BaseClient extends EventEmitter {
const url = this.url;
this.socket.on('error', err => {
this.logger.warn('connectivity error to the RPC service',
{ url, error: err });
{ url, error: err });
});
this.socket.on('connect', () => {
this.emit('connect');
@ -156,7 +155,7 @@ class BaseClient extends EventEmitter {
this.getManifest((err, manifest) => {
if (err) {
this.logger.error('Error fetching manifest from RPC server',
{ error: err });
{ error: err });
} else {
manifest.api.forEach(apiItem => {
this.createCall(apiItem.name);
@ -251,7 +250,6 @@ class BaseClient extends EventEmitter {
*
*/
class BaseService {
/**
* @constructor
*
@ -497,7 +495,7 @@ function RPCServer(params) {
conn.on('error', err => {
log.error('error on socket.io connection',
{ namespace: service.namespace, error: err });
{ namespace: service.namespace, error: err });
});
conn.on('call', (remoteCall, args, cb) => {
const decodedArgs = streamsSocket.decodeStreams(args);
@ -647,8 +645,8 @@ streamRPCJSONObj = function _streamRPCJSONObj(obj, wstream, cb) {
// primitive types
if (obj === undefined) {
wstream.write('null'); // if undefined elements are present in
// arrays, convert them to JSON null
// objects
// arrays, convert them to JSON null
// objects
} else {
wstream.write(JSON.stringify(obj));
}
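Reviewer note: the hunk above re-wraps the `async.timeout` call; as a reminder of what that wrapper does, here is a standalone sketch. `async.timeout` is the real async-library API; `fakeCall` and its delay are stand-ins for the socket.io round trip:

```
const async = require('async');

// async.timeout(fn, ms, info) returns fn wrapped with a deadline, as
// used in the hunk above. fakeCall stands in for the remote call.
function fakeCall(remoteCall, args, cb) {
    setTimeout(() => cb(null, `result of ${remoteCall}`), 50);
}

const timeoutMs = 1000;
async.timeout(fakeCall, timeoutMs,
    'operation myCall timed out')('myCall', [], (err, data) => {
    if (err) {
        // on expiry, err.code === 'ETIMEDOUT' and err.info carries
        // the message passed above
        return console.error(err);
    }
    return console.log(data);
});
```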


@ -16,7 +16,7 @@ class SIOOutputStream extends stream.Writable {
constructor(socket, streamId, maxPendingAck, ackTimeoutMs) {
super({ objectMode: true });
this._initOutputStream(socket, streamId, maxPendingAck,
ackTimeoutMs);
ackTimeoutMs);
}
_initOutputStream(socket, streamId, maxPendingAck, ackTimeoutMs) {
@ -194,7 +194,7 @@ class SIOStreamSocket {
this.socket.on('stream-data', (payload, cb) => {
const { streamId, data } = payload;
log.debug('received \'stream-data\' event',
{ streamId, size: data.length });
{ streamId, size: data.length });
const stream = this.remoteStreams[streamId];
if (!stream) {
log.debug('no such remote stream registered', { streamId });
@ -280,15 +280,15 @@ class SIOStreamSocket {
let transportStream;
if (isReadStream) {
transportStream = new SIOOutputStream(this, streamId,
this.maxPendingAck,
this.ackTimeoutMs);
this.maxPendingAck,
this.ackTimeoutMs);
} else {
transportStream = new SIOInputStream(this, streamId);
}
this.localStreams[streamId] = arg;
arg.once('close', () => {
log.debug('stream closed, removing from local streams',
{ streamId });
{ streamId });
delete this.localStreams[streamId];
});
arg.on('error', error => {
@ -350,8 +350,8 @@ class SIOStreamSocket {
stream = new SIOInputStream(this, streamId);
} else if (arg.writable) {
stream = new SIOOutputStream(this, streamId,
this.maxPendingAck,
this.ackTimeoutMs);
this.maxPendingAck,
this.ackTimeoutMs);
} else {
throw new Error('can\'t decode stream neither readable ' +
'nor writable');
@ -360,14 +360,14 @@ class SIOStreamSocket {
if (arg.readable) {
stream.once('close', () => {
log.debug('stream closed, removing from remote streams',
{ streamId });
{ streamId });
delete this.remoteStreams[streamId];
});
}
if (arg.writable) {
stream.once('finish', () => {
log.debug('stream finished, removing from remote streams',
{ streamId });
{ streamId });
delete this.remoteStreams[streamId];
});
}
@ -399,7 +399,7 @@ class SIOStreamSocket {
_write(streamId, data, cb) {
this.logger.debug('emit \'stream-data\' event',
{ streamId, size: data.length });
{ streamId, size: data.length });
this.socket.emit('stream-data', { streamId, data }, cb);
}


@ -1,8 +1,8 @@
'use strict'; // eslint-disable-line strict
const Ajv = require('ajv');
const userPolicySchema = require('./userPolicySchema');
const resourcePolicySchema = require('./resourcePolicySchema');
const userPolicySchema = require('./userPolicySchema.json');
const resourcePolicySchema = require('./resourcePolicySchema.json');
const errors = require('../errors');
const ajValidate = new Ajv({ allErrors: true });


@ -50,7 +50,7 @@ evaluators.isResourceApplicable = (requestContext, statementResource, log) => {
requestResourceArr, true);
if (arnSegmentsMatch) {
log.trace('policy resource is applicable to request',
{ requestResource: resource, policyResource });
{ requestResource: resource, policyResource });
return true;
}
continue;
@ -224,21 +224,21 @@ evaluators.evaluatePolicy = (requestContext, policy, log) => {
// in policy, move on to next statement
if (currentStatement.NotResource &&
evaluators.isResourceApplicable(requestContext,
currentStatement.NotResource, log)) {
currentStatement.NotResource, log)) {
continue;
}
// If affirmative action is in policy and request action is not
// applicable, move on to next statement
if (currentStatement.Action &&
!evaluators.isActionApplicable(requestContext.getAction(),
currentStatement.Action, log)) {
currentStatement.Action, log)) {
continue;
}
// If NotAction is in policy and action matches NotAction in policy,
// move on to next statement
if (currentStatement.NotAction &&
evaluators.isActionApplicable(requestContext.getAction(),
currentStatement.NotAction, log)) {
currentStatement.NotAction, log)) {
continue;
}
const conditionEval = currentStatement.Condition ?
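Reviewer note: the re-indented short-circuits above encode the statement-evaluation order: NotResource excludes, Action must match if present, NotAction excludes, and only then is Condition evaluated. A condensed sketch of that order (helper names mirror the diff; the `requestContext` shape with `getAction()` is assumed for illustration):

```
// Condensed sketch of the statement loop from evaluatePolicy above.
function statementApplies(requestContext, statement, helpers, log) {
    const { isResourceApplicable, isActionApplicable } = helpers;
    if (statement.NotResource &&
        isResourceApplicable(requestContext, statement.NotResource, log)) {
        return false; // resource excluded by NotResource
    }
    if (statement.Action &&
        !isActionApplicable(requestContext.getAction(),
            statement.Action, log)) {
        return false; // affirmative Action list does not match
    }
    if (statement.NotAction &&
        isActionApplicable(requestContext.getAction(),
            statement.NotAction, log)) {
        return false; // action excluded by NotAction
    }
    return true; // statement applies; Condition is evaluated next
}
```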


@ -39,11 +39,11 @@ conditions.findConditionKey = (key, requestContext) => {
// (see Boolean Condition Operators).
// Note: This key is only present if MFA was used. So, the following
// will not work:
// "Condition" :
// { "Bool" : { "aws:MultiFactorAuthPresent" : false } }
// "Condition" :
// { "Bool" : { "aws:MultiFactorAuthPresent" : false } }
// Instead use:
// "Condition" :
// { "Null" : { "aws:MultiFactorAuthPresent" : true } }
// "Condition" :
// { "Null" : { "aws:MultiFactorAuthPresent" : true } }
map.set('aws:MultiFactorAuthPresent',
requestContext.getMultiFactorAuthPresent());
// aws:MultiFactorAuthAge Used to check how many seconds since
@ -164,8 +164,8 @@ conditions.findConditionKey = (key, requestContext) => {
// so evaluation should be skipped
map.set('s3:RequestObjectTagKeys',
requestContext.getNeedTagEval() && requestContext.getRequestObjTags()
? getTagKeys(requestContext.getRequestObjTags())
: undefined);
? getTagKeys(requestContext.getRequestObjTags())
: undefined);
return map.get(key);
};
@ -189,7 +189,7 @@ function convertSpecialChars(string) {
return map[char];
}
return string.replace(/(\$\{\*\})|(\$\{\?\})|(\$\{\$\})/g,
characterMap);
characterMap);
}
/**
@ -423,10 +423,10 @@ conditions.convertConditionOperator = operator => {
return !operatorMap.ArnLike(key, value);
},
Null: function nullOperator(key, value) {
// Null is used to check if a condition key is present.
// The policy statement value should be either true (the key doesn't
// exist — it is null) or false (the key exists and its value is
// not null).
// Null is used to check if a condition key is present.
// The policy statement value should be either true (the key doesn't
// exist — it is null) or false (the key exists and its value is
// not null).
if ((key === undefined || key === null)
&& value[0] === 'true' ||
(key !== undefined && key !== null)
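Reviewer note: the rewrapped comment above describes the Null condition operator; a complete version of that predicate follows. The trailing `value[0] === 'false'` clause is inferred from the comment, since the hunk is cut off:

```
// The Null operator: value holds the policy strings ('true'/'false').
function nullOperator(key, value) {
    const keyIsAbsent = key === undefined || key === null;
    // 'true'  => the key must be absent (it "is null")
    // 'false' => the key must be present
    return (keyIsAbsent && value[0] === 'true') ||
        (!keyIsAbsent && value[0] === 'false');
}

// nullOperator(undefined, ['true'])     => true
// nullOperator('some-value', ['false']) => true
```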


@ -51,10 +51,10 @@ wildcards.handleWildcardInResource = arn => {
// Wildcards can be part of the resource ARN.
// Wildcards do NOT span segments of the ARN (separated by ":")
// Example: all elements in specific bucket:
// "Resource": "arn:aws:s3:::my_corporate_bucket/*"
// ARN format:
// arn:partition:service:region:namespace:relative-id
// Example: all elements in specific bucket:
// "Resource": "arn:aws:s3:::my_corporate_bucket/*"
// ARN format:
// arn:partition:service:region:namespace:relative-id
const arnArr = arn.split(':');
return arnArr.map(portion => wildcards.handleWildcards(portion));
};
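Reviewer note: the comment block above (now aligned with the code) states the key invariant: wildcards never cross the `:`-separated ARN segments. A sketch, with `handleWildcards` passed in as the per-segment expander to keep it self-contained:

```
// Wildcards are expanded per ARN segment, never across ':' boundaries.
function handleWildcardInResource(arn, handleWildcards) {
    // 'arn:aws:s3:::my_corporate_bucket/*' => six segments, each
    // expanded independently
    return arn.split(':').map(portion => handleWildcards(portion));
}
```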


@ -6,7 +6,6 @@ const crypto = require('crypto');
* data through a stream
*/
class MD5Sum extends Transform {
/**
* @constructor
*/
@ -40,7 +39,6 @@ class MD5Sum extends Transform {
this.emit('hashed');
callback(null);
}
}
module.exports = MD5Sum;
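Reviewer note: a usage sketch for the `MD5Sum` transform above: pipe data through it and read the digest once the `'hashed'` event (emitted in the hunk) fires. The `completedHash` property name and the require path are assumptions:

```
const stream = require('stream');
const MD5Sum = require('./MD5Sum'); // hypothetical path

// 'hashed' fires once the stream flushes; completedHash is assumed
// to hold the hex digest.
const hasher = new MD5Sum();
hasher.on('hashed', () => {
    console.log('md5:', hasher.completedHash);
});
stream.Readable.from(['hello ', 'world']).pipe(hasher).resume();
```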


@ -73,7 +73,7 @@ class ResultsCollector extends EventEmitter {
* @property {Error} [results[].error] - error returned by Azure putting subpart
* @property {number} results[].subPartIndex - index of the subpart
*/
/**
/**
* "error" event
* @event ResultCollector#error
* @type {(Error|undefined)} error - error returned by Azure last subpart


@ -94,7 +94,7 @@ azureMpuUtils.getSubPartIds = (part, uploadId) =>
azureMpuUtils.getBlockId(uploadId, part.partNumber, subPartIndex));
azureMpuUtils.putSinglePart = (errorWrapperFn, request, params, dataStoreName,
log, cb) => {
log, cb) => {
const { bucketName, partNumber, size, objectKey, contentMD5, uploadId }
= params;
const blockId = azureMpuUtils.getBlockId(uploadId, partNumber, 0);
@ -107,31 +107,31 @@ log, cb) => {
request.pipe(passThrough);
return errorWrapperFn('uploadPart', 'createBlockFromStream',
[blockId, bucketName, objectKey, passThrough, size, options,
(err, result) => {
if (err) {
log.error('Error from Azure data backend uploadPart',
{ error: err.message, dataStoreName });
if (err.code === 'ContainerNotFound') {
return cb(errors.NoSuchBucket);
(err, result) => {
if (err) {
log.error('Error from Azure data backend uploadPart',
{ error: err.message, dataStoreName });
if (err.code === 'ContainerNotFound') {
return cb(errors.NoSuchBucket);
}
if (err.code === 'InvalidMd5') {
return cb(errors.InvalidDigest);
}
if (err.code === 'Md5Mismatch') {
return cb(errors.BadDigest);
}
return cb(errors.InternalError.customizeDescription(
`Error returned from Azure: ${err.message}`),
);
}
if (err.code === 'InvalidMd5') {
return cb(errors.InvalidDigest);
}
if (err.code === 'Md5Mismatch') {
return cb(errors.BadDigest);
}
return cb(errors.InternalError.customizeDescription(
`Error returned from Azure: ${err.message}`)
);
}
const md5 = result.headers['content-md5'] || '';
const eTag = objectUtils.getHexMD5(md5);
return cb(null, eTag, size);
}], log, cb);
const md5 = result.headers['content-md5'] || '';
const eTag = objectUtils.getHexMD5(md5);
return cb(null, eTag, size);
}], log, cb);
};
azureMpuUtils.putNextSubPart = (errorWrapperFn, partParams, subPartInfo,
subPartStream, subPartIndex, resultsCollector, log, cb) => {
subPartStream, subPartIndex, resultsCollector, log, cb) => {
const { uploadId, partNumber, bucketName, objectKey } = partParams;
const subPartSize = azureMpuUtils.getSubPartSize(
subPartInfo, subPartIndex);
@ -140,11 +140,11 @@ subPartStream, subPartIndex, resultsCollector, log, cb) => {
resultsCollector.pushOp();
errorWrapperFn('uploadPart', 'createBlockFromStream',
[subPartId, bucketName, objectKey, subPartStream, subPartSize,
{}, err => resultsCollector.pushResult(err, subPartIndex)], log, cb);
{}, err => resultsCollector.pushResult(err, subPartIndex)], log, cb);
};
azureMpuUtils.putSubParts = (errorWrapperFn, request, params,
dataStoreName, log, cb) => {
dataStoreName, log, cb) => {
const subPartInfo = azureMpuUtils.getSubPartInfo(params.size);
const resultsCollector = new ResultsCollector();
const hashedStream = new MD5Sum();


@ -31,9 +31,9 @@ convertMethods.listMultipartUploads = xmlParams => {
const l = xmlParams.list;
xml.push('<?xml version="1.0" encoding="UTF-8"?>',
'<ListMultipartUploadsResult ' +
'<ListMultipartUploadsResult ' +
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">',
`<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>`
`<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>`,
);
// For certain XML elements, if it is `undefined`, AWS returns either an
@ -58,7 +58,7 @@ convertMethods.listMultipartUploads = xmlParams => {
});
xml.push(`<MaxUploads>${escapeForXml(l.MaxKeys)}</MaxUploads>`,
`<IsTruncated>${escapeForXml(l.IsTruncated)}</IsTruncated>`
`<IsTruncated>${escapeForXml(l.IsTruncated)}</IsTruncated>`,
);
l.Uploads.forEach(upload => {
@ -69,29 +69,29 @@ convertMethods.listMultipartUploads = xmlParams => {
}
xml.push('<Upload>',
`<Key>${escapeForXml(key)}</Key>`,
`<UploadId>${escapeForXml(val.UploadId)}</UploadId>`,
'<Initiator>',
`<ID>${escapeForXml(val.Initiator.ID)}</ID>`,
`<DisplayName>${escapeForXml(val.Initiator.DisplayName)}` +
`<Key>${escapeForXml(key)}</Key>`,
`<UploadId>${escapeForXml(val.UploadId)}</UploadId>`,
'<Initiator>',
`<ID>${escapeForXml(val.Initiator.ID)}</ID>`,
`<DisplayName>${escapeForXml(val.Initiator.DisplayName)}` +
'</DisplayName>',
'</Initiator>',
'<Owner>',
`<ID>${escapeForXml(val.Owner.ID)}</ID>`,
`<DisplayName>${escapeForXml(val.Owner.DisplayName)}` +
'</Initiator>',
'<Owner>',
`<ID>${escapeForXml(val.Owner.ID)}</ID>`,
`<DisplayName>${escapeForXml(val.Owner.DisplayName)}` +
'</DisplayName>',
'</Owner>',
`<StorageClass>${escapeForXml(val.StorageClass)}` +
'</Owner>',
`<StorageClass>${escapeForXml(val.StorageClass)}` +
'</StorageClass>',
`<Initiated>${escapeForXml(val.Initiated)}</Initiated>`,
'</Upload>'
`<Initiated>${escapeForXml(val.Initiated)}</Initiated>`,
'</Upload>',
);
});
l.CommonPrefixes.forEach(prefix => {
xml.push('<CommonPrefixes>',
`<Prefix>${escapeForXml(prefix)}</Prefix>`,
'</CommonPrefixes>'
`<Prefix>${escapeForXml(prefix)}</Prefix>`,
'</CommonPrefixes>',
);
});


@ -5,7 +5,6 @@ const Readable = require('stream').Readable;
* This class is used to produce zero-filled buffers for a reader's consumption
*/
class NullStream extends Readable {
/**
* Construct a new zero-filled buffer producer that will
* produce as many bytes as specified by the range parameter, or the size
@ -32,8 +31,8 @@ class NullStream extends Readable {
_read(size) {
const toRead = Math.min(size, this.bytesToRead);
const buffer = toRead > 0
? Buffer.alloc(toRead, 0)
: null;
? Buffer.alloc(toRead, 0)
: null;
this.bytesToRead -= toRead;
this.push(buffer);
}
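Reviewer note: a usage sketch for `NullStream`: `_read` above keeps pushing zero-filled buffers until `bytesToRead` runs out, then pushes `null` to end the stream. The constructor shape (a plain byte count) is an assumption based on the class doc:

```
const NullStream = require('./NullStream'); // hypothetical path

// Drain 1 KiB of zeros; each chunk is capped at the remaining budget.
const zeros = new NullStream(1024); // assumed constructor shape
let total = 0;
zeros.on('data', chunk => { total += chunk.length; });
zeros.on('end', () => console.log(`read ${total} zero bytes`));
```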


@ -4,11 +4,11 @@ const errors = require('../errors');
const escapeForXml = require('./escapeForXml');
const errorInvalidArgument = errors.InvalidArgument
.customizeDescription('The header \'x-amz-tagging\' shall be ' +
.customizeDescription('The header \'x-amz-tagging\' shall be ' +
'encoded as UTF-8 then URLEncoded URL query parameters without ' +
'tag name duplicates.');
const errorBadRequestLimit50 = errors.BadRequest
.customizeDescription('Object tags cannot be greater than 50');
.customizeDescription('Object tags cannot be greater than 50');
/*
Format of xml request:
@ -38,7 +38,7 @@ const _validator = {
result.Tagging.TagSet &&
result.Tagging.TagSet.length === 1 &&
(
result.Tagging.TagSet[0] === '' ||
result.Tagging.TagSet[0] === '' ||
result.Tagging.TagSet[0] &&
Object.keys(result.Tagging.TagSet[0]).length === 1 &&
result.Tagging.TagSet[0].Tag &&
@ -155,7 +155,7 @@ function parseTagXml(xml, log, cb) {
function convertToXml(objectTags) {
const xml = [];
xml.push('<?xml version="1.0" encoding="UTF-8" standalone="yes"?>',
'<Tagging> <TagSet>');
'<Tagging> <TagSet>');
if (objectTags && Object.keys(objectTags).length > 0) {
Object.keys(objectTags).forEach(key => {
xml.push(`<Tag><Key>${escapeForXml(key)}</Key>` +


@ -42,7 +42,7 @@ function checkBucketAndKey(bucketName, objectKey, method, reqQuery,
log.debug('empty bucket name', { method: 'routes' });
return (method !== 'OPTIONS') ?
errors.MethodNotAllowed : errors.AccessForbidden
.customizeDescription('CORSResponse: Bucket not found');
.customizeDescription('CORSResponse: Bucket not found');
}
if (bucketName !== undefined && routesUtils.isValidBucketName(bucketName,
blacklistedPrefixes.bucket) === false) {
@ -85,7 +85,7 @@ function checkTypes(req, res, params, logger, s3config) {
'bad routes param: internalHandlers must be an object');
if (params.statsClient) {
assert.strictEqual(typeof params.statsClient, 'object',
'bad routes param: statsClient must be an object');
'bad routes param: statsClient must be an object');
}
assert(Array.isArray(params.allEndpoints),
'bad routes param: allEndpoints must be an array');
@ -93,13 +93,13 @@ function checkTypes(req, res, params, logger, s3config) {
'bad routes param: allEndpoints must have at least one endpoint');
params.allEndpoints.forEach(endpoint => {
assert.strictEqual(typeof endpoint, 'string',
'bad routes param: each item in allEndpoints must be a string');
'bad routes param: each item in allEndpoints must be a string');
});
assert(Array.isArray(params.websiteEndpoints),
'bad routes param: allEndpoints must be an array');
params.websiteEndpoints.forEach(endpoint => {
assert.strictEqual(typeof endpoint, 'string',
'bad routes param: each item in websiteEndpoints must be a string');
'bad routes param: each item in websiteEndpoints must be a string');
});
assert.strictEqual(typeof params.blacklistedPrefixes, 'object',
'bad routes param: blacklistedPrefixes must be an object');
@ -107,13 +107,13 @@ function checkTypes(req, res, params, logger, s3config) {
'bad routes param: blacklistedPrefixes.bucket must be an array');
params.blacklistedPrefixes.bucket.forEach(pre => {
assert.strictEqual(typeof pre, 'string',
'bad routes param: each blacklisted bucket prefix must be a string');
'bad routes param: each blacklisted bucket prefix must be a string');
});
assert(Array.isArray(params.blacklistedPrefixes.object),
'bad routes param: blacklistedPrefixes.object must be an array');
params.blacklistedPrefixes.object.forEach(pre => {
assert.strictEqual(typeof pre, 'string',
'bad routes param: each blacklisted object prefix must be a string');
'bad routes param: each blacklisted object prefix must be a string');
});
assert.strictEqual(typeof params.dataRetrievalFn, 'function',
'bad routes param: dataRetrievalFn must be a defined function');
@ -171,8 +171,8 @@ function routes(req, res, params, logger, s3config) {
reqUids = undefined;
}
const log = (reqUids !== undefined ?
logger.newRequestLoggerFromSerializedUids(reqUids) :
logger.newRequestLogger());
logger.newRequestLoggerFromSerializedUids(reqUids) :
logger.newRequestLogger());
if (!req.url.startsWith('/_/healthcheck')) {
log.info('received request', clientInfo);
@ -207,7 +207,7 @@ function routes(req, res, params, logger, s3config) {
return routesUtils.responseXMLBody(
errors.InvalidURI.customizeDescription('Could not parse the ' +
'specified URI. Check your restEndpoints configuration.'),
undefined, res, log);
undefined, res, log);
}
log.addDefaultFields({
@ -229,7 +229,7 @@ function routes(req, res, params, logger, s3config) {
if (bucketOrKeyError) {
log.trace('error with bucket or key value',
{ error: bucketOrKeyError });
{ error: bucketOrKeyError });
return routesUtils.responseXMLBody(bucketOrKeyError, null, res, log);
}


@ -7,7 +7,7 @@ function routeDELETE(request, response, api, log, statsClient) {
if (request.query.uploadId) {
if (request.objectKey === undefined) {
return routesUtils.responseNoBody(
errors.InvalidRequest.customizeDescription('A key must be ' +
errors.InvalidRequest.customizeDescription('A key must be ' +
'specified'), null, response, 200, log);
}
api.callApiMethod('multipartDelete', request, response, log,
@ -19,77 +19,77 @@ function routeDELETE(request, response, api, log, statsClient) {
} else if (request.objectKey === undefined) {
if (request.query.website !== undefined) {
return api.callApiMethod('bucketDeleteWebsite', request,
response, log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
response, log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
} else if (request.query.cors !== undefined) {
return api.callApiMethod('bucketDeleteCors', request, response,
log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
} else if (request.query.replication !== undefined) {
return api.callApiMethod('bucketDeleteReplication', request,
response, log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
response, log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
} else if (request.query.lifecycle !== undefined) {
return api.callApiMethod('bucketDeleteLifecycle', request,
response, log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
response, log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
} else if (request.query.policy !== undefined) {
return api.callApiMethod('bucketDeletePolicy', request,
response, log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
response, log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
} else if (request.query.encryption !== undefined) {
return api.callApiMethod('bucketDeleteEncryption', request,
response, log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
response, log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
}
api.callApiMethod('bucketDelete', request, response, log,
(err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders, response,
204, log);
});
(err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders, response,
204, log);
});
} else {
if (request.query.tagging !== undefined) {
return api.callApiMethod('objectDeleteTagging', request,
response, log, (err, resHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, resHeaders,
response, 204, log);
});
response, log, (err, resHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, resHeaders,
response, 204, log);
});
}
api.callApiMethod('objectDelete', request, response, log,
(err, corsHeaders) => {
/*
(err, corsHeaders) => {
/*
* Since AWS expects a 204 regardless of the existence of
* the object, the errors NoSuchKey and NoSuchVersion should not
* be sent back as a response.
*/
if (err && !err.NoSuchKey && !err.NoSuchVersion) {
return routesUtils.responseNoBody(err, corsHeaders,
response, null, log);
}
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(null, corsHeaders, response,
204, log);
});
if (err && !err.NoSuchKey && !err.NoSuchVersion) {
return routesUtils.responseNoBody(err, corsHeaders,
response, null, log);
}
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(null, corsHeaders, response,
204, log);
});
}
return undefined;
}
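Reviewer note: the objectDelete callback above encodes the AWS quirk spelled out in its comment: DELETE answers 204 whether or not the key exists, so NoSuchKey/NoSuchVersion are swallowed. Extracted as a sketch (`responseNoBody` is the router helper from the diff; the standalone wrapper is illustrative):

```
// AWS-style DELETE: a missing object is still a 204, so NoSuchKey and
// NoSuchVersion never reach the client as errors.
function onObjectDelete(err, corsHeaders, response, log, routesUtils) {
    if (err && !err.NoSuchKey && !err.NoSuchVersion) {
        return routesUtils.responseNoBody(err, corsHeaders,
            response, null, log);
    }
    // success, or an error AWS hides: 204 either way
    return routesUtils.responseNoBody(null, corsHeaders,
        response, 204, log);
}
```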


@ -16,18 +16,18 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
// GET bucket ACL
if (request.query.acl !== undefined) {
api.callApiMethod('bucketGetACL', request, response, log,
(err, xml, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseXMLBody(err, xml, response, log,
corsHeaders);
});
(err, xml, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseXMLBody(err, xml, response, log,
corsHeaders);
});
} else if (request.query.replication !== undefined) {
api.callApiMethod('bucketGetReplication', request, response, log,
(err, xml, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseXMLBody(err, xml, response, log,
corsHeaders);
});
(err, xml, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseXMLBody(err, xml, response, log,
corsHeaders);
});
} else if (request.query.cors !== undefined) {
api.callApiMethod('bucketGetCors', request, response, log,
(err, xml, corsHeaders) => {
@ -69,7 +69,7 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
(err, xml, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseXMLBody(err, xml, response, log,
corsHeaders);
corsHeaders);
});
} else if (request.query.policy !== undefined) {
api.callApiMethod('bucketGetPolicy', request, response, log,
@ -94,11 +94,11 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
});
} else if (request.query.encryption !== undefined) {
api.callApiMethod('bucketGetEncryption', request, response, log,
(err, xml, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseXMLBody(err, xml, response,
log, corsHeaders);
});
(err, xml, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseXMLBody(err, xml, response,
log, corsHeaders);
});
} else {
// GET bucket
api.callApiMethod('bucketGet', request, response, log,


@ -21,11 +21,11 @@ function routeOPTIONS(request, response, api, log, statsClient) {
}
return api.callApiMethod('corsPreflight', request, response, log,
(err, resHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, resHeaders, response, 200,
log);
});
(err, resHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, resHeaders, response, 200,
log);
});
}
module.exports = routeOPTIONS;


@ -27,28 +27,28 @@ function routePOST(request, response, api, log) {
if (request.query.uploads !== undefined) {
return api.callApiMethod('initiateMultipartUpload', request,
response, log, (err, result, corsHeaders) =>
routesUtils.responseXMLBody(err, result, response, log,
corsHeaders));
routesUtils.responseXMLBody(err, result, response, log,
corsHeaders));
}
// POST complete multipart upload
if (request.query.uploadId !== undefined) {
return api.callApiMethod('completeMultipartUpload', request,
response, log, (err, result, resHeaders) =>
routesUtils.responseXMLBody(err, result, response, log,
resHeaders));
routesUtils.responseXMLBody(err, result, response, log,
resHeaders));
}
// POST multiObjectDelete
if (request.query.delete !== undefined) {
return api.callApiMethod('multiObjectDelete', request, response,
log, (err, xml, corsHeaders) =>
routesUtils.responseXMLBody(err, xml, response, log,
corsHeaders));
routesUtils.responseXMLBody(err, xml, response, log,
corsHeaders));
}
return routesUtils.responseNoBody(errors.NotImplemented, null, response,
200, log);
200, log);
}
/* eslint-enable no-param-reassign */
module.exports = routePOST;


@ -14,16 +14,16 @@ function routePUT(request, response, api, log, statsClient) {
|| contentLength < 0)) || contentLength === '') {
log.debug('invalid content-length header');
return routesUtils.responseNoBody(
errors.BadRequest, null, response, null, log);
errors.BadRequest, null, response, null, log);
}
// PUT bucket ACL
if (request.query.acl !== undefined) {
api.callApiMethod('bucketPutACL', request, response, log,
(err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 200, log);
});
(err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 200, log);
});
} else if (request.query.versioning !== undefined) {
api.callApiMethod('bucketPutVersioning', request, response, log,
(err, corsHeaders) => {
@ -82,11 +82,11 @@ function routePUT(request, response, api, log, statsClient) {
});
} else if (request.query.encryption !== undefined) {
api.callApiMethod('bucketPutEncryption', request, response, log,
(err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 200, log);
});
(err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 200, log);
});
} else {
// PUT bucket
return api.callApiMethod('bucketPut', request, response, log,
@ -110,7 +110,7 @@ function routePUT(request, response, api, log, statsClient) {
method: 'routePUT',
});
return routesUtils
.responseNoBody(errors.InvalidDigest, null, response, 200, log);
.responseNoBody(errors.InvalidDigest, null, response, 200, log);
}
if (request.headers['content-md5']) {
request.contentMD5 = request.headers['content-md5'];
@ -126,17 +126,17 @@ function routePUT(request, response, api, log, statsClient) {
});
return routesUtils
.responseNoBody(errors.InvalidDigest, null, response, 200,
log);
log);
}
}
if (request.query.partNumber) {
if (request.headers['x-amz-copy-source']) {
api.callApiMethod('objectPutCopyPart', request, response, log,
(err, xml, additionalHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseXMLBody(err, xml, response, log,
(err, xml, additionalHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseXMLBody(err, xml, response, log,
additionalHeaders);
});
});
} else {
api.callApiMethod('objectPutPart', request, response, log,
(err, calculatedHash, corsHeaders) => {
@ -202,11 +202,11 @@ function routePUT(request, response, api, log, statsClient) {
contentLength: request.parsedContentLength,
});
api.callApiMethod('objectPut', request, response, log,
(err, resHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, resHeaders,
response, 200, log);
});
(err, resHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, resHeaders,
response, 200, log);
});
}
}
return undefined;


@ -14,7 +14,7 @@ function routerWebsite(request, response, api, log, statsClient,
if (request.method === 'GET') {
return api.callApiMethod('websiteGet', request, response, log,
(err, userErrorPageFailure, dataGetInfo, resMetaHeaders,
redirectInfo, key) => {
redirectInfo, key) => {
routesUtils.statsReport500(err, statsClient);
// request being redirected
if (redirectInfo) {
@ -43,21 +43,21 @@ function routerWebsite(request, response, api, log, statsClient,
}
if (request.method === 'HEAD') {
return api.callApiMethod('websiteHead', request, response, log,
(err, resMetaHeaders, redirectInfo, key) => {
routesUtils.statsReport500(err, statsClient);
if (redirectInfo) {
return routesUtils.redirectRequest(redirectInfo,
key, request.connection.encrypted,
response, request.headers.host, resMetaHeaders, log);
}
// could redirect on err so check for redirectInfo first
if (err) {
return routesUtils.errorHeaderResponse(err, response,
resMetaHeaders, log);
}
return routesUtils.responseContentHeaders(err, {}, resMetaHeaders,
response, log);
});
(err, resMetaHeaders, redirectInfo, key) => {
routesUtils.statsReport500(err, statsClient);
if (redirectInfo) {
return routesUtils.redirectRequest(redirectInfo,
key, request.connection.encrypted,
response, request.headers.host, resMetaHeaders, log);
}
// could redirect on err so check for redirectInfo first
if (err) {
return routesUtils.errorHeaderResponse(err, response,
resMetaHeaders, log);
}
return routesUtils.responseContentHeaders(err, {}, resMetaHeaders,
response, log);
});
}
return undefined;
}


@ -25,7 +25,7 @@ function setCommonResponseHeaders(headers, response, log) {
} catch (e) {
log.debug('header can not be added ' +
'to the response', { header: headers[key],
error: e.stack, method: 'setCommonResponseHeaders' });
error: e.stack, method: 'setCommonResponseHeaders' });
}
}
});
@ -68,7 +68,7 @@ const XMLResponseBackend = {
* @return {object} response - response object with additional headers
*/
okResponse: function okXMLResponse(xml, response, log,
additionalHeaders) {
additionalHeaders) {
const bytesSent = Buffer.byteLength(xml);
log.trace('sending success xml response');
log.addDefaultFields({
@ -115,7 +115,7 @@ const XMLResponseBackend = {
`<Message>${errCode.description}</Message>`,
'<Resource></Resource>',
`<RequestId>${log.getSerializedUids()}</RequestId>`,
'</Error>'
'</Error>',
);
const xmlStr = xml.join('');
const bytesSent = Buffer.byteLength(xmlStr);
@ -145,7 +145,7 @@ const JSONResponseBackend = {
* @return {object} response - response object with additional headers
*/
okResponse: function okJSONResponse(json, response, log,
additionalHeaders) {
additionalHeaders) {
const bytesSent = Buffer.byteLength(json);
log.trace('sending success json response');
log.addDefaultFields({
@ -163,7 +163,7 @@ const JSONResponseBackend = {
},
errorResponse: function errorJSONResponse(errCode, response, log,
corsHeaders) {
corsHeaders) {
log.trace('sending error json response', { errCode });
/*
{
@ -337,27 +337,27 @@ function retrieveData(locations, retrieveDataFn, response, log) {
currentStream = readable;
return readable.pipe(response, { end: false });
}), err => {
currentStream = null;
if (err) {
log.debug('abort response due to error', {
error: err.code, errMsg: err.message });
}
// call end for all cases (error/success) per node.js docs
// recommendation
response.end();
currentStream = null;
if (err) {
log.debug('abort response due to error', {
error: err.code, errMsg: err.message });
}
// call end for all cases (error/success) per node.js docs
// recommendation
response.end();
},
);
}
function _responseBody(responseBackend, errCode, payload, response, log,
additionalHeaders) {
additionalHeaders) {
if (errCode && !response.headersSent) {
return responseBackend.errorResponse(errCode, response, log,
additionalHeaders);
additionalHeaders);
}
if (!response.headersSent) {
return responseBackend.okResponse(payload, response, log,
additionalHeaders);
additionalHeaders);
}
return undefined;
}
@ -366,8 +366,8 @@ function _computeContentLengthFromLocation(dataLocations) {
return dataLocations.reduce(
(sum, location) => (sum !== undefined &&
(typeof location.size === 'number' || typeof location.size === 'string') ?
sum + Number.parseInt(location.size, 10) :
undefined), 0);
sum + Number.parseInt(location.size, 10) :
undefined), 0);
}
function _contentLengthMatchesLocations(contentLength, dataLocations) {
@ -388,7 +388,7 @@ const routesUtils = {
*/
responseXMLBody(errCode, xml, response, log, additionalHeaders) {
return _responseBody(XMLResponseBackend, errCode, xml, response,
log, additionalHeaders);
log, additionalHeaders);
},
/**
@ -402,7 +402,7 @@ const routesUtils = {
*/
responseJSONBody(errCode, json, response, log, additionalHeaders) {
return _responseBody(JSONResponseBackend, errCode, json, response,
log, additionalHeaders);
log, additionalHeaders);
},
/**
@ -417,7 +417,7 @@ const routesUtils = {
responseNoBody(errCode, resHeaders, response, httpCode = 200, log) {
if (errCode && !response.headersSent) {
return XMLResponseBackend.errorResponse(errCode, response, log,
resHeaders);
resHeaders);
}
if (!response.headersSent) {
return okHeaderResponse(resHeaders, response, httpCode, log);
@ -435,10 +435,10 @@ const routesUtils = {
* @return {object} - router's response object
*/
responseContentHeaders(errCode, overrideParams, resHeaders, response,
log) {
log) {
if (errCode && !response.headersSent) {
return XMLResponseBackend.errorResponse(errCode, response, log,
resHeaders);
resHeaders);
}
if (!response.headersSent) {
// Undefined added as an argument since need to send range to
@ -472,7 +472,7 @@ const routesUtils = {
retrieveDataFn, response, range, log) {
if (errCode && !response.headersSent) {
return XMLResponseBackend.errorResponse(errCode, response, log,
resHeaders);
resHeaders);
}
if (dataLocations !== null && !response.headersSent) {
// sanity check of content length against individual data
@ -480,13 +480,13 @@ const routesUtils = {
const contentLength = resHeaders && resHeaders['Content-Length'];
if (contentLength !== undefined &&
!_contentLengthMatchesLocations(contentLength,
dataLocations)) {
dataLocations)) {
log.error('logic error: total length of fetched data ' +
'locations does not match returned content-length',
{ contentLength, dataLocations });
{ contentLength, dataLocations });
return XMLResponseBackend.errorResponse(errors.InternalError,
response, log,
resHeaders);
response, log,
resHeaders);
}
}
if (!response.headersSent) {
@ -558,7 +558,7 @@ const routesUtils = {
`<h1>${err.code} ${response.statusMessage}</h1>`,
'<ul>',
`<li>Code: ${err.message}</li>`,
`<li>Message: ${err.description}</li>`
`<li>Message: ${err.description}</li>`,
);
if (!userErrorPageFailure && bucketName) {
@ -568,7 +568,7 @@ const routesUtils = {
`<li>RequestId: ${log.getSerializedUids()}</li>`,
// AWS response contains HostId here.
// TODO: consider adding
'</ul>'
'</ul>',
);
if (userErrorPageFailure) {
html.push(
@ -578,13 +578,13 @@ const routesUtils = {
'<ul>',
`<li>Code: ${err.message}</li>`,
`<li>Message: ${err.description}</li>`,
'</ul>'
'</ul>',
);
}
html.push(
'<hr/>',
'</body>',
'</html>'
'</html>',
);
return response.end(html.join(''), 'utf8', () => {
@ -806,7 +806,7 @@ const routesUtils = {
// most specific potential hostname
bucketName =
potentialBucketName.length < bucketName.length ?
potentialBucketName : bucketName;
potentialBucketName : bucketName;
}
}
}
@ -814,7 +814,7 @@ const routesUtils = {
return bucketName;
}
throw new Error(
`bad request: hostname ${host} is not in valid endpoints`
`bad request: hostname ${host} is not in valid endpoints`,
);
},


@ -33,7 +33,6 @@ const FOLDER_HASH = 3511;
* directory hash structure under the configured dataPath.
*/
class DataFileStore {
/**
* @constructor
* @param {Object} dataConfig - configuration of the file backend
@ -66,13 +65,13 @@ class DataFileStore {
fs.access(this.dataPath, fs.F_OK | fs.R_OK | fs.W_OK, err => {
if (err) {
this.logger.error('Data path is not readable or writable',
{ error: err });
{ error: err });
return callback(err);
}
// Create FOLDER_HASH subdirectories
const subDirs = Array.from({ length: FOLDER_HASH },
(v, k) => (k).toString());
(v, k) => (k).toString());
this.logger.info(`pre-creating ${subDirs.length} subdirs...`);
if (!this.noSync) {
storageUtils.setDirSyncFlag(this.dataPath, this.logger);
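Reviewer note: the init path above pre-creates one subdirectory per hash bucket. How a key maps into those FOLDER_HASH buckets is not shown in this hunk, so the hash below is purely illustrative; only the modulo layout is taken from the diff:

```
const path = require('path');

const FOLDER_HASH = 3511; // from the hunk above

// Illustrative key-to-folder mapping: only the `% FOLDER_HASH` layout
// comes from the diff; the hash function itself is made up.
function folderFor(key, dataPath) {
    let hash = 0;
    for (const ch of key) {
        hash = (hash * 31 + ch.charCodeAt(0)) >>> 0;
    }
    return path.join(dataPath, (hash % FOLDER_HASH).toString());
}

// folderFor('someKey', '/data') => '/data/<n>' with 0 <= n < 3511
```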
@ -89,7 +88,7 @@ class DataFileStore {
err => {
if (err) {
this.logger.error('Error creating subdirs',
{ error: err });
{ error: err });
return callback(err);
}
this.logger.info('data file store init complete, ' +
@ -131,7 +130,7 @@ class DataFileStore {
fs.open(filePath, 'wx', (err, fd) => {
if (err) {
log.error('error opening filePath',
{ method: 'put', key, filePath, error: err });
{ method: 'put', key, filePath, error: err });
return callback(errors.InternalError.customizeDescription(
`filesystem error: open() returned ${err.code}`));
}
@ -145,7 +144,7 @@ class DataFileStore {
fileStream.on('finish', () => {
function ok() {
log.debug('finished writing data',
{ method: 'put', key, filePath });
{ method: 'put', key, filePath });
return cbOnce(null, key);
}
if (this.noSync) {
@ -177,7 +176,7 @@ class DataFileStore {
return undefined;
}).on('error', err => {
log.error('error streaming data on write',
{ method: 'put', key, filePath, error: err });
{ method: 'put', key, filePath, error: err });
// destroying the write stream forces a close(fd)
fileStream.destroy();
return cbOnce(errors.InternalError.customizeDescription(
@ -215,7 +214,7 @@ class DataFileStore {
return callback(errors.ObjNotFound);
}
log.error('error on \'stat\' of file',
{ key, filePath, error: err });
{ key, filePath, error: err });
return callback(errors.InternalError.customizeDescription(
`filesystem error: stat() returned ${err.code}`));
}
@ -248,21 +247,21 @@ class DataFileStore {
readStreamOptions.end = byteRange[1];
}
log.debug('opening readStream to get data',
{ method: 'get', key, filePath, byteRange });
{ method: 'get', key, filePath, byteRange });
const cbOnce = jsutil.once(callback);
const rs = fs.createReadStream(filePath, readStreamOptions)
.on('error', err => {
if (err.code === 'ENOENT') {
return cbOnce(errors.ObjNotFound);
}
log.error('error retrieving file',
{ method: 'get', key, filePath,
error: err });
return cbOnce(
errors.InternalError.customizeDescription(
`filesystem read error: ${err.code}`));
})
.on('open', () => { cbOnce(null, rs); });
.on('error', err => {
if (err.code === 'ENOENT') {
return cbOnce(errors.ObjNotFound);
}
log.error('error retrieving file',
{ method: 'get', key, filePath,
error: err });
return cbOnce(
errors.InternalError.customizeDescription(
`filesystem read error: ${err.code}`));
})
.on('open', () => { cbOnce(null, rs); });
}
/**


@ -175,25 +175,25 @@ class MetadataWrapper {
const value = typeof objVal.getValue === 'function' ?
objVal.getValue() : objVal;
this.client.putObject(bucketName, objName, value, params, log,
(err, data) => {
if (err) {
log.debug('error from metadata', { implName: this.implName,
error: err });
return cb(err);
}
if (data) {
log.debug('object version successfully put in metadata',
{ version: data });
} else {
log.debug('object successfully put in metadata');
}
return cb(err, data);
});
(err, data) => {
if (err) {
log.debug('error from metadata', { implName: this.implName,
error: err });
return cb(err);
}
if (data) {
log.debug('object version successfully put in metadata',
{ version: data });
} else {
log.debug('object successfully put in metadata');
}
return cb(err, data);
});
}
getBucketAndObjectMD(bucketName, objName, params, log, cb) {
log.debug('getting bucket and object from metadata',
{ database: bucketName, object: objName });
{ database: bucketName, object: objName });
this.client.getBucketAndObject(bucketName, objName, params, log,
(err, data) => {
if (err) {
@ -202,7 +202,7 @@ class MetadataWrapper {
return cb(err);
}
log.debug('bucket and object retrieved from metadata',
{ database: bucketName, object: objName });
{ database: bucketName, object: objName });
return cb(err, data);
});
}


@ -5,7 +5,7 @@ const BucketInfo = require('../../../models/BucketInfo');
class BucketClientInterface {
constructor(params, bucketclient, logger) {
assert(params.bucketdBootstrap.length > 0,
'bucketd bootstrap list is empty');
'bucketd bootstrap list is empty');
const bootstrap = params.bucketdBootstrap;
const log = params.bucketdLog;
if (params.https) {
@ -29,7 +29,7 @@ class BucketClientInterface {
createBucket(bucketName, bucketMD, log, cb) {
this.client.createBucket(bucketName, log.getSerializedUids(),
bucketMD.serialize(), cb);
bucketMD.serialize(), cb);
return null;
}
@ -57,17 +57,17 @@ class BucketClientInterface {
getRaftBuckets(raftId, log, cb) {
return this.client.getRaftBuckets(raftId, log.getSerializedUids(),
(err, data) => {
if (err) {
return cb(err);
}
return cb(null, JSON.parse(data));
});
(err, data) => {
if (err) {
return cb(err);
}
return cb(null, JSON.parse(data));
});
}
putBucketAttributes(bucketName, bucketMD, log, cb) {
this.client.putBucketAttributes(bucketName, log.getSerializedUids(),
bucketMD.serialize(), cb);
bucketMD.serialize(), cb);
return null;
}
@ -95,7 +95,7 @@ class BucketClientInterface {
deleteObject(bucketName, objName, params, log, cb) {
this.client.deleteObject(bucketName, objName, log.getSerializedUids(),
cb, params);
cb, params);
return null;
}
@ -183,8 +183,8 @@ class BucketClientInterface {
reason.msg = undefined;
respBody[implName] = {
code: 200,
message, // Provide interpreted reason msg
body: reason, // Provide analysis data
message, // Provide interpreted reason msg
body: reason, // Provide analysis data
};
if (failure) {
// Setting the `error` field is how the healthCheck

View File

@ -30,7 +30,6 @@ class ListRecordStream extends stream.Transform {
* @classdesc Proxy object to access raft log API
*/
class LogConsumer {
/**
* @constructor
*
@ -97,14 +96,14 @@ class LogConsumer {
if (err.code === 404) {
// no such raft session, log and ignore
this.logger.warn('raft session does not exist yet',
{ raftId: this.raftSession });
{ raftId: this.raftSession });
return cbOnce(null, { info: { start: null,
end: null } });
}
if (err.code === 416) {
// requested range not satisfiable
this.logger.debug('no new log record to process',
{ raftId: this.raftSession });
{ raftId: this.raftSession });
return cbOnce(null, { info: { start: null,
end: null } });
}
@ -116,7 +115,7 @@ class LogConsumer {
// is emitted
recordStream.on('error', err => {
this.logger.error('error receiving raft log',
{ error: err.message });
{ error: err.message });
return cbOnce(errors.InternalError);
});
const jsonResponse = stream.pipe(jsonStream.parse('log.*'));
@ -127,7 +126,7 @@ class LogConsumer {
// remove temporary listener
recordStream.removeAllListeners('error');
return cbOnce(null, { info: header.info,
log: recordStream });
log: recordStream });
})
.on('error', err => recordStream.emit('error', err));
return undefined;


@ -8,7 +8,6 @@ const { RecordLogProxy } = require('./RecordLog.js');
const werelogs = require('werelogs');
class MetadataFileClient {
/**
* Construct a metadata client
*
@ -87,7 +86,7 @@ class MetadataFileClient {
logProxy.connect(err => {
if (err) {
this.logger.error('error connecting to record log service',
{ url, error: err.stack });
{ url, error: err.stack });
return done(err);
}
this.logger.info('connected to record log service', { url });


@ -25,7 +25,6 @@ const SYNC_OPTIONS = { sync: true };
const SUBLEVEL_SEP = '::';
class MetadataFileServer {
/**
* Construct a metadata server
*
@ -218,7 +217,7 @@ class MetadataFileServer {
});
} else {
this.rootDb.batch(ops, SYNC_OPTIONS,
err => callback(err));
err => callback(err));
}
},
};
@ -235,17 +234,17 @@ class MetadataFileServer {
put: (env, key, value, options, cb) => {
const dbName = env.subLevel.join(SUBLEVEL_SEP);
vrp.put({ db: dbName, key, value, options },
env.requestLogger, cb);
env.requestLogger, cb);
},
del: (env, key, options, cb) => {
const dbName = env.subLevel.join(SUBLEVEL_SEP);
vrp.del({ db: dbName, key, options },
env.requestLogger, cb);
env.requestLogger, cb);
},
get: (env, key, options, cb) => {
const dbName = env.subLevel.join(SUBLEVEL_SEP);
vrp.get({ db: dbName, key, options },
env.requestLogger, cb);
env.requestLogger, cb);
},
getDiskUsage: (env, cb) => diskusage.check(this.path, cb),
});


@ -18,7 +18,6 @@ const DEFAULT_RECORD_LOG_NAME = 's3-recordlog';
* object.
*/
class RecordLogProxy extends rpc.BaseClient {
constructor(params) {
super(params);
@ -102,7 +101,6 @@ class ListRecordStream extends stream.Transform {
* updates can be transactional with each other.
*/
class RecordLogService extends rpc.BaseService {
/**
* @constructor
*
@ -274,12 +272,12 @@ class RecordLogService extends rpc.BaseService {
limit: _params.limit,
};
const userStream = new ListRecordStream(endSeq,
_params.limit);
_params.limit);
const dbStream =
openLog.logDb.createReadStream(queryParams);
dbStream.pipe(userStream);
dbStream.once('error',
err => userStream.emit('error', err));
err => userStream.emit('error', err));
userStream.once('error', err => {
userStream.removeAllListeners('info');
cb(err);


@ -60,8 +60,8 @@ class TestMatrix {
this.elementsToSpecialize = elementsToSpecialize;
this.callback = callback;
this.description = typeof description === 'undefined'
? ''
: description;
? ''
: description;
return this;
}
@ -158,15 +158,15 @@ class TestMatrix {
const callFunction = (matrixFather, matrixChild, callback,
description) => {
const result = Object.keys(matrixChild.params)
.every(currentKey =>
Object.prototype.toString.call(
matrixChild.params[currentKey]
.every(currentKey =>
Object.prototype.toString.call(
matrixChild.params[currentKey],
).indexOf('Array') === -1);
if (result === true) {
describe(matrixChild.serialize(), () => {
it(description,
done => callback(matrixChild, done));
done => callback(matrixChild, done));
});
} else {
describe(matrixChild.serialize(), () => {


@ -247,7 +247,7 @@ function decode(str) {
}
module.exports = { generateVersionId, getInfVid,
hexEncode, hexDecode,
base62Encode, base62Decode,
encode, decode,
ENC_TYPE_HEX, ENC_TYPE_BASE62 };
hexEncode, hexDecode,
base62Encode, base62Decode,
encode, decode,
ENC_TYPE_HEX, ENC_TYPE_BASE62 };


@ -84,7 +84,7 @@ class VersioningRequestProcessor {
return callback(null, data);
}
logger.debug('master version is a PHD, getting the latest version',
{ db, key });
{ db, key });
// otherwise, need to search for the latest version
return this.getByListing(request, logger, callback);
});
@ -187,7 +187,7 @@ class VersioningRequestProcessor {
return entry.callback(err, value);
}
return this.wgm.get(entry.request, entry.logger,
entry.callback);
entry.callback);
});
delete this.queue[cacheKey];
}
@ -267,19 +267,19 @@ class VersioningRequestProcessor {
return callback(err);
}
return this.writeCache.batch({ db, array, options },
logger, err => callback(err, `{"versionId":"${vid}"}`));
logger, err => callback(err, `{"versionId":"${vid}"}`));
};
if (versionId) {
return this.processVersionSpecificPut(request, logger,
versioningCb);
versioningCb);
}
if (versioning) {
return this.processNewVersionPut(request, logger, versioningCb);
}
// no versioning or versioning configuration off
return this.writeCache.batch({ db, array: [{ key, value }] },
logger, callback);
logger, callback);
}
/**
@ -353,7 +353,7 @@ class VersioningRequestProcessor {
if (!(options && options.versionId)) {
return this.writeCache.batch({ db,
array: [{ key, type: 'del' }] },
logger, callback);
logger, callback);
}
// version specific DELETE
return this.processVersionSpecificDelete(request, logger,
@ -399,7 +399,7 @@ class VersioningRequestProcessor {
const cacheKey = formatCacheKey(db, key);
clearTimeout(this.repairing[cacheKey]);
this.repairing[cacheKey] = setTimeout(() =>
this.getByListing(request, logger, () => {}), 15000);
this.getByListing(request, logger, () => {}), 15000);
}
return callback(null, ops, versionId);
});


@ -5,7 +5,7 @@
},
"version": "7.10.13",
"description": "Common utilities for the S3 project components",
"main": "index.js",
"main": "build/index.js",
"repository": {
"type": "git",
"url": "git+https://github.com/scality/Arsenal.git"
@ -22,11 +22,11 @@
"agentkeepalive": "^4.1.3",
"ajv": "6.12.2",
"async": "~2.1.5",
"base62": "2.0.1",
"base-x": "3.0.8",
"base62": "2.0.1",
"debug": "~2.6.9",
"diskusage": "^1.1.1",
"ioredis": "4.9.5",
"ioredis": "^4.28.5",
"ipaddr.js": "1.9.1",
"level": "~5.0.1",
"level-sublevel": "~6.6.5",
@ -44,21 +44,46 @@
"ioctl": "^2.0.2"
},
"devDependencies": {
"@babel/preset-env": "^7.16.11",
"@babel/preset-typescript": "^7.16.7",
"@sinonjs/fake-timers": "^6.0.1",
"eslint": "2.13.1",
"@types/jest": "^27.4.1",
"@types/node": "^17.0.21",
"eslint": "^8.10.0",
"eslint-config-airbnb": "6.2.0",
"eslint-config-scality": "scality/Guidelines#7.10.2",
"eslint-plugin-react": "^4.3.0",
"jest": "^27.5.1",
"mocha": "8.0.1",
"sinon": "^9.0.2",
"temp": "0.9.1"
"temp": "0.9.1",
"ts-jest": "^27.1.3",
"ts-node": "^10.6.0",
"typescript": "^4.6.2"
},
"scripts": {
"lint": "eslint $(git ls-files '*.js')",
"lint_md": "mdlint $(git ls-files '*.md')",
"lint_yml": "yamllint $(git ls-files '*.yml')",
"test": "mocha --recursive tests/unit",
"ft_test": "find tests/functional -name \"*.js\" | grep -v \"utils/\" | xargs mocha --timeout 120000"
"test": "jest tests/unit",
"build": "tsc",
"prepare": "yarn build || true",
"ft_test": "jest tests/functional --testTimeout=120000 --forceExit"
},
"private": true
"private": true,
"jest": {
"maxWorkers": 1,
"collectCoverageFrom": [
"lib/**/*.{js,ts}",
"index.js"
],
"preset": "ts-jest",
"globals": {
"test-jest": {
"diagnostics": {
"warnOnly": true
}
}
}
}
}
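Reviewer note: for reference, the inline `"jest"` block above expressed as a standalone `jest.config.js` would look roughly like this (a sketch; the repo keeps the config in package.json):

```
// Rough jest.config.js equivalent of the "jest" block above.
module.exports = {
    maxWorkers: 1, // run the suite serially
    collectCoverageFrom: [
        'lib/**/*.{js,ts}',
        'index.js',
    ],
    preset: 'ts-jest',
    globals: {
        'ts-jest': {
            diagnostics: {
                warnOnly: true, // surface TS diagnostics without failing
            },
        },
    },
};
```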

tests/.eslintrc Normal file

@ -0,0 +1,5 @@
{
"env": {
"jest": true
}
}
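Reviewer note: the five-line `tests/.eslintrc` above exists so ESLint recognizes Jest's injected globals; without it, every test file would trip `no-undef`. For example:

```
// With "env": { "jest": true }, ESLint accepts Jest's globals
// (describe, it, expect, beforeAll, ...) without no-undef errors:
describe('example', () => {
    beforeAll(() => { /* setup */ });
    it('adds', () => {
        expect(1 + 1).toBe(2);
    });
});
```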


@ -5,7 +5,7 @@ const spawn = require('child_process').spawn;
let currentSpawn = undefined;
function runTest(name, done) {
const test = spawn('node', [`${__dirname}/utils/${name}.js`]);
const test = spawn('yarn', ['ts-node', '--transpile-only', `${__dirname}/utils/${name}.js`]);
currentSpawn = test;
test.stdout.pipe(process.stdout);
test.stderr.pipe(process.stderr);


@ -43,28 +43,28 @@ describe('KMIP High Level Driver', () => {
it('should work with' +
` x-name attribute: ${!!bucketNameAttributeName},` +
` compound creation: ${compoundCreateActivate}`,
done => {
const kmipClient = new KMIPClient(options, TTLVCodec,
LoopbackServerTransport);
const plaintext = Buffer.from(crypto.randomBytes(32));
async.waterfall([
next => kmipClient.createBucketKey('plop', logger, next),
(id, next) =>
kmipClient.cipherDataKey(1, id, plaintext,
logger, (err, ciphered) => {
next(err, id, ciphered);
}),
(id, ciphered, next) =>
kmipClient.decipherDataKey(
1, id, ciphered, logger, (err, deciphered) => {
assert(plaintext
.compare(deciphered) === 0);
next(err, id);
}),
(id, next) =>
kmipClient.destroyBucketKey(id, logger, next),
], done);
});
done => {
const kmipClient = new KMIPClient(options, TTLVCodec,
LoopbackServerTransport);
const plaintext = Buffer.from(crypto.randomBytes(32));
async.waterfall([
next => kmipClient.createBucketKey('plop', logger, next),
(id, next) =>
kmipClient.cipherDataKey(1, id, plaintext,
logger, (err, ciphered) => {
next(err, id, ciphered);
}),
(id, ciphered, next) =>
kmipClient.decipherDataKey(
1, id, ciphered, logger, (err, deciphered) => {
assert(plaintext
.compare(deciphered) === 0);
next(err, id);
}),
(id, next) =>
kmipClient.destroyBucketKey(id, logger, next),
], done);
});
});
});
it('should succeed healthcheck with working KMIP client and server', done => {
@ -84,7 +84,7 @@ describe('KMIP High Level Driver', () => {
},
};
const kmipClient = new KMIPClient(options, TTLVCodec,
LoopbackServerTransport);
LoopbackServerTransport);
kmipClient.healthcheck(logger, err => {
assert.ifError(err);
done();


@ -36,17 +36,17 @@ describe('KMIP Low Level Driver', () => {
const kmip = new KMIP(TTLVCodec, MirrorTransport, options);
const requestPayload = fixture.payload(kmip);
kmip.request(logger, fixture.operation,
requestPayload, (err, response) => {
if (err) {
return done(err);
}
const responsePayload = response.lookup(
'Response Message/Batch Item/Response Payload'
)[0];
assert.deepStrictEqual(responsePayload,
requestPayload);
return done();
});
requestPayload, (err, response) => {
if (err) {
return done(err);
}
const responsePayload = response.lookup(
'Response Message/Batch Item/Response Payload',
)[0];
assert.deepStrictEqual(responsePayload,
requestPayload);
return done();
});
});
});
});


@ -7,7 +7,7 @@ const { logger } = require('../../utils/kmip/ersatz.js');
describe('KMIP Connection Management', () => {
let server;
before(done => {
beforeAll(done => {
server = net.createServer(conn => {
// abort the connection as soon as it is accepted
conn.destroy();
@ -15,7 +15,7 @@ describe('KMIP Connection Management', () => {
server.listen(5696);
server.on('listening', done);
});
after(done => {
afterAll(done => {
server.close(done);
});
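Reviewer note: the hook rename above is the whole mocha-to-Jest port for this suite: `before`/`after` become `beforeAll`/`afterAll`, and the done-callback style carries over unchanged. The same fixture in isolation:

```
const net = require('net');

// Jest's beforeAll/afterAll take the same done-callback style as
// mocha's before/after, so the server fixture ports one-for-one.
let server;
beforeAll(done => {
    server = net.createServer(conn => conn.destroy());
    server.listen(5696);
    server.on('listening', done);
});
afterAll(done => server.close(done));
```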


@ -1,82 +0,0 @@
'use strict'; // eslint-disable-line
const async = require('async');
const assert = require('assert');
const TransportTemplate =
require('../../../lib/network/kmip/transport/TransportTemplate.js');
const { logger, EchoChannel } = require('../../utils/kmip/ersatz.js');
describe('KMIP Transport Template Class', () => {
const pipelineDepths = [1, 2, 4, 8, 16, 32];
const requestNumbers = [1, 37, 1021, 8191];
pipelineDepths.forEach(pipelineDepth => {
requestNumbers.forEach(iterations => {
it(`should survive ${iterations} iterations` +
` with ${pipelineDepth}way pipeline`,
done => {
const transport = new TransportTemplate(
new EchoChannel,
{
pipelineDepth,
tls: {
port: 5696,
},
});
const request = Buffer.alloc(10).fill(6);
async.times(iterations, (n, next) => {
transport.send(logger, request,
(err, conversation, response) => {
if (err) {
return next(err);
}
if (request.compare(response) !== 0) {
return next(Error('arg'));
}
return next();
});
}, err => {
transport.end();
done(err);
});
});
[true, false].forEach(doEmit => {
it('should report errors to outstanding requests.' +
` w:${pipelineDepth}, i:${iterations}, e:${doEmit}`,
done => {
const echoChannel = new EchoChannel;
echoChannel.clog();
const transport = new TransportTemplate(
echoChannel,
{
pipelineDepth,
tls: {
port: 5696,
},
});
const request = Buffer.alloc(10).fill(6);
/* Using a for loop here instead of anything
* asynchronous, the callbacks get stuck in
* the conversation queue and are unwound with
* an error. That is the purpose of this test */
for (let i = 0; i < iterations; ++i) {
transport.send(
logger, request,
(err, conversation, response) => {
assert(err);
assert(!response);
});
}
if (doEmit) {
echoChannel.emit('error', new Error('awesome'));
} else {
transport.abortPipeline(echoChannel);
}
transport.end();
done();
});
});
});
});
});


@ -0,0 +1,82 @@
'use strict'; // eslint-disable-line
const async = require('async');
const assert = require('assert');
const TransportTemplate =
require('../../../lib/network/kmip/transport/TransportTemplate.js');
const { logger, EchoChannel } = require('../../utils/kmip/ersatz.js');
describe('KMIP Transport Template Class', () => {
const pipelineDepths = [1, 2, 4, 8, 16, 32];
const requestNumbers = [1, 37, 1021, 8191];
pipelineDepths.forEach(pipelineDepth => {
requestNumbers.forEach(iterations => {
it(`should survive ${iterations} iterations` +
` with ${pipelineDepth}way pipeline`,
done => {
const transport = new TransportTemplate(
new EchoChannel,
{
pipelineDepth,
tls: {
port: 5696,
},
});
const request = Buffer.alloc(10).fill(6);
async.times(iterations, (n, next) => {
transport.send(logger, request,
(err, conversation, response) => {
if (err) {
return next(err);
}
if (request.compare(response) !== 0) {
return next(Error('arg'));
}
return next();
});
}, err => {
transport.end();
done(err);
});
});
[true, false].forEach(doEmit => {
it('should report errors to outstanding requests.' +
` w:${pipelineDepth}, i:${iterations}, e:${doEmit}`,
done => {
const echoChannel = new EchoChannel;
echoChannel.clog();
const transport = new TransportTemplate(
echoChannel,
{
pipelineDepth,
tls: {
port: 5696,
},
});
const request = Buffer.alloc(10).fill(6);
/* Because this uses a for loop rather than anything
* asynchronous, the callbacks get stuck in
* the conversation queue and are unwound with
* an error. That is the purpose of this test. */
for (let i = 0; i < iterations; ++i) {
transport.send(
logger, request,
(err, conversation, response) => {
assert(err);
assert(!response);
});
}
if (doEmit) {
echoChannel.emit('error', new Error('awesome'));
} else {
transport.abortPipeline(echoChannel);
}
transport.end();
done();
});
});
});
});
});
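
The two hunks above (`@@ -1,82 +0,0 @@` followed by `@@ -0,0 +1,82 @@`) are the delete-and-add halves of a rename: the transport test moves to the `.spec.js` suffix so Jest discovers it without extra configuration. A sketch of the discovery pattern involved, assuming the repo relies on Jest's built-in `testMatch` rather than a custom one:

```js
// jest.config.js (illustrative only): Jest's default testMatch already
// includes this pattern, which is why renaming [name].js to [name].spec.js
// is enough for the runner to pick the file up.
module.exports = {
    testMatch: ['**/?(*.)+(spec|test).[jt]s?(x)'],
};
```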

@@ -85,11 +85,11 @@ describe('LRUCache', () => {
assert.strictEqual(lru.get(100), undefined);
});
it('max 1000000 entries', function lru1M() {
it('max 1000000 entries', () => {
// this test takes ~1-2 seconds on a laptop, nevertheless set a
// large timeout to reduce the potential of flakiness on possibly
// slower CI environment.
this.timeout(30000);
jest.setTimeout(30000);
const lru = new LRUCache(1000000);
@@ -111,7 +111,7 @@ describe('LRUCache', () => {
// check present (even) and evicted (odd) items
for (let i = 0; i < 1000000; ++i) {
assert.strictEqual(lru.get(`${i}`),
i % 2 === 0 ? i : undefined);
i % 2 === 0 ? i : undefined);
assert.strictEqual(lru.remove(`${i}`), i % 2 === 0);
}
assert.strictEqual(lru.count(), 500000);
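
`this.timeout(30000)` works in mocha only when the test body is a classic `function`; once the body becomes an arrow function for Jest, the timeout moves to `jest.setTimeout(30000)`. One hedged caveat, since the exact behaviour depends on the Jest version in use: `jest.setTimeout` sets the default for tests declared after it runs, so calling it inside a test body mostly protects the tests that follow. Jest also accepts a per-test timeout as an optional third argument to `it`, as sketched here:

```js
// mocha style (needs a classic function to reach `this`):
// it('max 1000000 entries', function lru1M() { this.timeout(30000); /* ... */ });

// Jest: file-wide default, usually hoisted to the top of the test file...
jest.setTimeout(30000);

// ...or a per-test override through the optional third argument.
it('max 1000000 entries', () => {
    // populate and probe the cache here
}, 30000);
```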

@@ -18,19 +18,19 @@ describe('Multipart Uploads listing algorithm', () => {
const initiator2 = { ID: '2', DisplayName: 'initiator2' };
const keys = {
v0: [`${overviewPrefix}test/1${splitter}uploadId1`,
`${overviewPrefix}test/2${splitter}uploadId2`,
`${overviewPrefix}test/3${splitter}uploadId3`,
`${overviewPrefix}testMore/4${splitter}uploadId4`,
`${overviewPrefix}testMore/5${splitter}uploadId5`,
`${overviewPrefix}prefixTest/5${splitter}uploadId5`,
],
`${overviewPrefix}test/2${splitter}uploadId2`,
`${overviewPrefix}test/3${splitter}uploadId3`,
`${overviewPrefix}testMore/4${splitter}uploadId4`,
`${overviewPrefix}testMore/5${splitter}uploadId5`,
`${overviewPrefix}prefixTest/5${splitter}uploadId5`,
],
v1: [`${DbPrefixes.Master}${overviewPrefix}test/1${splitter}uploadId1`,
`${DbPrefixes.Master}${overviewPrefix}test/2${splitter}uploadId2`,
`${DbPrefixes.Master}${overviewPrefix}test/3${splitter}uploadId3`,
`${DbPrefixes.Master}${overviewPrefix}testMore/4${splitter}uploadId4`,
`${DbPrefixes.Master}${overviewPrefix}testMore/5${splitter}uploadId5`,
`${DbPrefixes.Master}${overviewPrefix}prefixTest/5${splitter}uploadId5`,
],
`${DbPrefixes.Master}${overviewPrefix}test/2${splitter}uploadId2`,
`${DbPrefixes.Master}${overviewPrefix}test/3${splitter}uploadId3`,
`${DbPrefixes.Master}${overviewPrefix}testMore/4${splitter}uploadId4`,
`${DbPrefixes.Master}${overviewPrefix}testMore/5${splitter}uploadId5`,
`${DbPrefixes.Master}${overviewPrefix}prefixTest/5${splitter}uploadId5`,
],
};
const values = [
JSON.stringify({
@@ -135,7 +135,7 @@ describe('Multipart Uploads listing algorithm', () => {
}));
it(`should perform a vFormat=${vFormat} listing of all keys`, () => {
const listingResult = performListing(dbListing, MultipartUploads,
listingParams, logger, vFormat);
listingParams, logger, vFormat);
assert.deepStrictEqual(listingResult, expectedResult);
});
@@ -151,7 +151,7 @@ describe('Multipart Uploads listing algorithm', () => {
expectedResult.NextUploadIdMarker = '';
const listingResult = performListing(dbListing, MultipartUploads,
listingParams, logger, vFormat);
listingParams, logger, vFormat);
assert.deepStrictEqual(listingResult, expectedResult);
});
@@ -167,7 +167,7 @@ describe('Multipart Uploads listing algorithm', () => {
expectedResult.MaxKeys = 3;
const listingResult = performListing(dbListing, MultipartUploads,
listingParams, logger, vFormat);
listingParams, logger, vFormat);
assert.deepStrictEqual(listingResult, expectedResult);
});
});

@@ -62,7 +62,7 @@ describe('Basic listing algorithm', () => {
it('Should support key-only listing', () => {
const res = performListing(['key1', 'key2'],
Basic, { maxKeys: 1 }, logger);
Basic, { maxKeys: 1 }, logger);
assert.deepStrictEqual(res, ['key1']);
});

@@ -85,7 +85,7 @@ const nonAlphabeticalData = [
const receivedData = data.map(item => ({ key: item.key, value: item.value }));
const receivedNonAlphaData = nonAlphabeticalData.map(
item => ({ key: item.key, value: item.value })
item => ({ key: item.key, value: item.value }),
);
const tests = [
@@ -732,15 +732,15 @@ function getTestListing(test, data, vFormat) {
});
}
assert.strictEqual(delimiter.skipping(),
`${vFormat === 'v1' ? DbPrefixes.Master : ''}foo/`);
`${vFormat === 'v1' ? DbPrefixes.Master : ''}foo/`);
});
it('Should set Delimiter alphabeticalOrder field to the expected value', () => {
alphabeticalOrderTests.forEach(test => {
const delimiter = new Delimiter(test.params);
assert.strictEqual(delimiter.alphabeticalOrder,
test.expectedValue,
`${JSON.stringify(test.params)}`);
test.expectedValue,
`${JSON.stringify(test.params)}`);
});
});
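
The trailing-comma and re-indentation hunks in this file, and in most of the listing tests below, are mechanical lint fixes. A hedged guess at the ESLint rules they satisfy; the repository's actual configuration may differ:

```js
// .eslintrc.js sketch: the rule names are standard ESLint, but their
// presence here is an assumption inferred from the shape of the diffs.
module.exports = {
    rules: {
        // require a trailing comma on multiline literals and argument lists
        'comma-dangle': ['error', 'always-multiline'],
        // four-space indentation, including continuation lines
        indent: ['error', 4],
    },
};
```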

@@ -63,7 +63,7 @@ function getListingKey(key, vFormat) {
'NextMarker is set and there is a delimiter', () => {
const key = 'key';
const delimiter = new DelimiterMaster({ delimiter: '/', marker: key },
fakeLogger, vFormat);
fakeLogger, vFormat);
/* Filter a master version to set NextMarker. */
const listingKey = getListingKey(key, vFormat);
@@ -104,8 +104,8 @@ function getListingKey(key, vFormat) {
* delimiter it should return the next marker value. */
assert.strictEqual(delimiter.NextMarker, keyWithEndingDelimiter);
const skipKey = vFormat === 'v1' ?
`${DbPrefixes.Master}${keyWithEndingDelimiter}` :
keyWithEndingDelimiter;
`${DbPrefixes.Master}${keyWithEndingDelimiter}` :
keyWithEndingDelimiter;
assert.strictEqual(delimiter.skipping(), skipKey);
});
@@ -159,7 +159,7 @@ function getListingKey(key, vFormat) {
const value = 'value';
const delimiter = new DelimiterMaster({ delimiter: delimiterChar },
fakeLogger, vFormat);
fakeLogger, vFormat);
/* Filter the first entry with a common prefix. It should be
* accepted and added to the result. */
@@ -167,7 +167,7 @@ function getListingKey(key, vFormat) {
key: getListingKey(prefix1Key1, vFormat),
value,
}),
FILTER_ACCEPT);
FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1],
Contents: [],
@@ -182,7 +182,7 @@ function getListingKey(key, vFormat) {
key: getListingKey(prefix1Key2, vFormat),
value,
}),
FILTER_SKIP);
FILTER_SKIP);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1],
Contents: [],
@@ -197,7 +197,7 @@ function getListingKey(key, vFormat) {
key: getListingKey(prefix2Key1, vFormat),
value,
}),
FILTER_ACCEPT);
FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1, commonPrefix2],
Contents: [],
@@ -416,7 +416,7 @@ function getListingKey(key, vFormat) {
const value = 'value';
const delimiter = new DelimiterMaster({ delimiter: delimiterChar },
fakeLogger, vFormat);
fakeLogger, vFormat);
/* Filter the two first entries with the same common prefix to add
* it to the result and reach the state where an entry is skipped
@@ -448,7 +448,7 @@ function getListingKey(key, vFormat) {
const value = 'value';
const delimiter = new DelimiterMaster({ delimiter: delimiterChar },
fakeLogger, vFormat);
fakeLogger, vFormat);
/* TODO: should be set to a whole key instead of just a common prefix
* once ZENKO-1048 is fixed. */
delimiter.NextMarker = commonPrefix;
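
For readers skimming the delimiter tests: `filter()` classifies each database entry and `skipping()` returns the prefix the listing can fast-forward past. The sketch below restates that contract as the assertions exercise it; the constant values are hypothetical stand-ins, not Arsenal's actual exports:

```js
// Hypothetical values for illustration; real code should import the
// constants from Arsenal's listing tools instead.
const FILTER_ACCEPT = 1; // entry (or its new common prefix) joins the result
const FILTER_SKIP = 0;   // entry falls under an already-recorded common prefix

// Typical assertion shape seen throughout these tests:
// assert.strictEqual(delimiter.filter({ key, value }), FILTER_ACCEPT);
// assert.strictEqual(delimiter.skipping(), 'foo/'); // next range to skip over
```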

@@ -60,7 +60,7 @@ const dataVersioned = {
{ key: `notes/spring/2.txt${VID_SEP}bar`, value: valueDeleteMarker },
{ key: `notes/spring/2.txt${VID_SEP}foo`, value: foo },
{ key: 'notes/spring/march/1.txt',
value: '{"versionId":"null","isNull":true}' },
value: '{"versionId":"null","isNull":true}' },
{ key: `notes/spring/march/1.txt${VID_SEP}bar`, value: bar },
{ key: `notes/spring/march/1.txt${VID_SEP}foo`, value: foo },
{ key: 'notes/summer/1.txt', value: bar },
@@ -83,7 +83,7 @@ const dataVersioned = {
{ key: 'notes/zaphod/Beeblebrox.txt', value },
],
v1: [ // we add M and V prefixes in getTestListing() due to the
// test cases needing the original key to filter
// test cases needing the original key to filter
{ key: 'Pâtisserie=中文-español-English', value: bar },
{ key: `Pâtisserie=中文-español-English${VID_SEP}bar`, value: bar },
{ key: `Pâtisserie=中文-español-English${VID_SEP}foo`, value: foo },
@@ -94,7 +94,7 @@ const dataVersioned = {
{ key: `notes/spring/2.txt${VID_SEP}bar`, value: valueDeleteMarker },
{ key: `notes/spring/2.txt${VID_SEP}foo`, value: foo },
{ key: 'notes/spring/march/1.txt',
value: '{"versionId":"null","isNull":true}' },
value: '{"versionId":"null","isNull":true}' },
{ key: `notes/spring/march/1.txt${VID_SEP}bar`, value: bar },
{ key: `notes/spring/march/1.txt${VID_SEP}foo`, value: foo },
{ key: 'notes/summer/1.txt', value: bar },
@@ -147,7 +147,7 @@ const tests = [
new Test('all versions', {}, {
v0: {},
v1: [{ gte: DbPrefixes.Master, lt: inc(DbPrefixes.Master) },
{ gte: DbPrefixes.Version, lt: inc(DbPrefixes.Version) }],
{ gte: DbPrefixes.Version, lt: inc(DbPrefixes.Version) }],
}, {
Versions: receivedData,
CommonPrefixes: [],
@@ -272,7 +272,7 @@ const tests = [
}],
}, {
Versions: receivedData.filter(entry =>
entry.key.indexOf('notes/summer') < 0),
entry.key.indexOf('notes/summer') < 0),
CommonPrefixes: ['notes/summer'],
Delimiter: 'notes/summer',
IsTruncated: false,
@@ -502,7 +502,7 @@ function getListingKey(key, vFormat) {
}
if (vFormat === 'v1') {
const keyPrefix = key.includes(VID_SEP) ?
DbPrefixes.Version : DbPrefixes.Master;
DbPrefixes.Version : DbPrefixes.Master;
return `${keyPrefix}${key}`;
}
return assert.fail(`bad format ${vFormat}`);
@@ -594,7 +594,7 @@ function getTestListing(test, data, vFormat) {
'does not contain the delimiter', () => {
const key = 'foo';
const delimiter = new DelimiterVersions({ delimiter: '/', marker: key },
logger, vFormat);
logger, vFormat);
/* Filter a master version to set NextMarker. */
const listingKey = getListingKey(key, vFormat);
@@ -607,7 +607,7 @@ function getTestListing(test, data, vFormat) {
'contains the delimiter', () => {
const key = 'foo/bar';
const delimiter = new DelimiterVersions({ delimiter: '/', marker: key },
logger, vFormat);
logger, vFormat);
/* Filter a master version to set NextMarker. */
const listingKey = getListingKey(key, vFormat);
@@ -628,7 +628,7 @@ function getTestListing(test, data, vFormat) {
'ends with the delimiter', () => {
const key = 'foo/';
const delimiter = new DelimiterVersions({ delimiter: '/', marker: key },
logger, vFormat);
logger, vFormat);
/* Filter a master version to set NextMarker. */
const listingKey = getListingKey(key, vFormat);
@@ -679,7 +679,7 @@ function getTestListing(test, data, vFormat) {
it('should return good values for entries with different common prefixes', () => {
const delimiter = new DelimiterVersions({ delimiter: '/' },
logger, vFormat);
logger, vFormat);
/* Filter the first entry with a common prefix. It should be
* accepted and added to the result. */
@@ -687,7 +687,7 @@ function getTestListing(test, data, vFormat) {
key: getListingKey('commonPrefix1/key1', vFormat),
value: '',
}),
FILTER_ACCEPT);
FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: ['commonPrefix1/'],
Versions: [],
@@ -703,7 +703,7 @@ function getTestListing(test, data, vFormat) {
key: getListingKey('commonPrefix1/key2', vFormat),
value: '',
}),
FILTER_SKIP);
FILTER_SKIP);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: ['commonPrefix1/'],
Versions: [],
@@ -719,7 +719,7 @@ function getTestListing(test, data, vFormat) {
key: getListingKey('commonPrefix2/key1', vFormat),
value: '',
}),
FILTER_ACCEPT);
FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: ['commonPrefix1/', 'commonPrefix2/'],
Versions: [],
@@ -830,7 +830,7 @@ function getTestListing(test, data, vFormat) {
const value = '{"versionId":"version"}';
const delimiter = new DelimiterVersions({ delimiter: '/' },
logger, vFormat);
logger, vFormat);
/* Filter the two first entries with the same common prefix to add
* it to the result and reach the state where an entry is skipped
@@ -906,7 +906,7 @@ function getTestListing(test, data, vFormat) {
const value = '{"versionId":"version"}';
const delimiter = new DelimiterVersions({ delimiter: '/' },
logger, vFormat);
logger, vFormat);
assert.strictEqual(delimiter.filter({
key: getListingKey(prefixKey1, vFormat),

@@ -52,7 +52,7 @@ function compareInt(a, b) {
}
function testMergeStreamWithIntegers(contents1, contents2,
usePauseResume, errorAtEnd, cb) {
usePauseResume, errorAtEnd, cb) {
const expectedItems = contents1.concat(contents2).sort(compareInt);
const mergeStream = new MergeStream(
new Streamify(contents1, errorAtEnd)
@@ -189,39 +189,39 @@ describe('MergeStream', () => {
`${usePauseResume ? ' with pause/resume' : ''}` +
`${errorAtEnd ? ' with error' : ''}`;
it(`${nbEntries} sequential entries${fixtureDesc}`,
function bigMergeSequential(done) {
this.timeout(10000);
const stream1 = [];
const stream2 = [];
for (let i = 0; i < nbEntries; ++i) {
if (Math.floor(i / (nbEntries / 10)) % 2 === 0) {
stream1.push(i);
} else {
stream2.push(i);
(done) => {
jest.setTimeout(10000);
const stream1 = [];
const stream2 = [];
for (let i = 0; i < nbEntries; ++i) {
if (Math.floor(i / (nbEntries / 10)) % 2 === 0) {
stream1.push(i);
} else {
stream2.push(i);
}
}
}
testMergeStreamWithIntegers(
stream1, stream2, usePauseResume, errorAtEnd, done);
});
testMergeStreamWithIntegers(
stream1, stream2, usePauseResume, errorAtEnd, done);
});
it(`${nbEntries} randomly mingled entries${fixtureDesc}`,
function bigMergeRandom(done) {
this.timeout(10000);
const stream1 = [];
const stream2 = [];
let accu = nbEntries;
for (let i = 0; i < nbEntries; ++i) {
(done) => {
jest.setTimeout(10000);
const stream1 = [];
const stream2 = [];
let accu = nbEntries;
for (let i = 0; i < nbEntries; ++i) {
// picked two large arbitrary prime numbers to get a
// deterministic random-looking series
accu = (accu * 1592760451) % 8448053;
if (accu % 2 === 0) {
stream1.push(i);
} else {
stream2.push(i);
accu = (accu * 1592760451) % 8448053;
if (accu % 2 === 0) {
stream1.push(i);
} else {
stream2.push(i);
}
}
}
testMergeStreamWithIntegers(
stream1, stream2, usePauseResume, errorAtEnd, done);
});
testMergeStreamWithIntegers(
stream1, stream2, usePauseResume, errorAtEnd, done);
});
}
});
});
@@ -232,29 +232,29 @@ describe('MergeStream', () => {
// event, so it's useful to test both cases
[3, 100].forEach(nbItemsPerStream => {
it(`destroy() should destroy both inner streams with ${nbItemsPerStream} items per stream`,
done => {
const stream1 = new Streamify(new Array(nbItemsPerStream).fill().map((e, i) => 2 * i));
const stream2 = new Streamify(new Array(nbItemsPerStream).fill().map((e, i) => 1 + 2 * i));
const mergeStream = new MergeStream(stream1, stream2, compareInt);
mergeStream.on('data', item => {
if (item === 5) {
mergeStream.destroy();
const s1ended = stream1._ended;
const s2ended = stream2._ended;
setTimeout(() => {
if (!s1ended) {
assert(stream1._destroyed);
}
if (!s2ended) {
assert(stream2._destroyed);
}
done();
}, 10);
}
done => {
const stream1 = new Streamify(new Array(nbItemsPerStream).fill().map((e, i) => 2 * i));
const stream2 = new Streamify(new Array(nbItemsPerStream).fill().map((e, i) => 1 + 2 * i));
const mergeStream = new MergeStream(stream1, stream2, compareInt);
mergeStream.on('data', item => {
if (item === 5) {
mergeStream.destroy();
const s1ended = stream1._ended;
const s2ended = stream2._ended;
setTimeout(() => {
if (!s1ended) {
assert(stream1._destroyed);
}
if (!s2ended) {
assert(stream2._destroyed);
}
done();
}, 10);
}
});
mergeStream.once('error', err => {
assert.fail(`unexpected error: ${err.message}`);
});
});
mergeStream.once('error', err => {
assert.fail(`unexpected error: ${err.message}`);
});
});
});
});
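
A side note on the "randomly mingled" fixture above: it splits the entries with a multiplicative congruential generator, so the interleaving looks random but is fully reproducible. Strictly speaking the product overflows `Number.MAX_SAFE_INTEGER`, making this deterministic double-precision arithmetic rather than exact modular math. A standalone sketch, with the two prime constants taken from the test and everything else illustrative:

```js
// Deterministically split 0..nbEntries-1 between two streams, as the test does.
function splitDeterministically(nbEntries) {
    const stream1 = [];
    const stream2 = [];
    let accu = nbEntries;
    for (let i = 0; i < nbEntries; ++i) {
        // accu * 1592760451 exceeds 2^53, so the modulo is computed on an
        // inexact (but deterministic) double; fine for a test fixture.
        accu = (accu * 1592760451) % 8448053;
        (accu % 2 === 0 ? stream1 : stream2).push(i);
    }
    return [stream1, stream2];
}
```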

@@ -89,10 +89,10 @@ describe('AuthLoader class', () => {
// Check a failure when the type of field is different than
// expected
it(`should fail when modified field ${test[0]} ${test[1]}`,
done => {
should._exec = shouldFail;
should.modifiedField(obj, test[0], test[1], done);
});
done => {
should._exec = shouldFail;
should.modifiedField(obj, test[0], test[1], done);
});
}
});

@@ -14,27 +14,27 @@ const gcpCanonicalizedResource = request =>
describe('canonicalization', () => {
it('should construct a canonicalized header in the correct order for AWS',
() => {
const headers = {
'date': 'Mon, 21 Sep 2015 22:29:27 GMT',
'x-amz-request-payer': 'requester',
'x-amz-meta-meta': 'something very meta',
'x-amz-meta-bits': '0',
'x-amz-meta-blksize': '2097152',
'x-amz-meta-compress': '0',
'authorization': 'AWS accessKey1:V8g5UJUFmMzruMqUHVT6ZwvUw+M=',
'host': 's3.amazonaws.com:80',
'connection': 'Keep-Alive',
'user-agent': 'Cyberduck/4.7.2.18004 (Mac OS X/10.10.5) (x86_64)',
};
const canonicalizedHeader = getCanonicalizedAmzHeaders(headers);
assert.strictEqual(canonicalizedHeader,
'x-amz-meta-bits:0\n' +
() => {
const headers = {
'date': 'Mon, 21 Sep 2015 22:29:27 GMT',
'x-amz-request-payer': 'requester',
'x-amz-meta-meta': 'something very meta',
'x-amz-meta-bits': '0',
'x-amz-meta-blksize': '2097152',
'x-amz-meta-compress': '0',
'authorization': 'AWS accessKey1:V8g5UJUFmMzruMqUHVT6ZwvUw+M=',
'host': 's3.amazonaws.com:80',
'connection': 'Keep-Alive',
'user-agent': 'Cyberduck/4.7.2.18004 (Mac OS X/10.10.5) (x86_64)',
};
const canonicalizedHeader = getCanonicalizedAmzHeaders(headers);
assert.strictEqual(canonicalizedHeader,
'x-amz-meta-bits:0\n' +
'x-amz-meta-blksize:2097152\n' +
'x-amz-meta-compress:0\n' +
'x-amz-meta-meta:something very meta\n' +
'x-amz-request-payer:requester\n');
});
});
it('should return an empty string as the canonicalized ' +
'header if no amz headers', () => {
@@ -62,7 +62,7 @@ describe('canonicalization', () => {
};
const canonicalizedResource = getCanonicalizedResource(request);
assert.strictEqual(canonicalizedResource,
'/bucket/obj?requestPayment=yes,please');
'/bucket/obj?requestPayment=yes,please');
});
it('should return the path as the canonicalized resource ' +
@@ -92,27 +92,27 @@ describe('canonicalization', () => {
});
it('should construct a canonicalized header in the correct order for GCP',
() => {
const headers = {
'date': 'Mon, 21 Sep 2015 22:29:27 GMT',
'x-goog-request-payer': 'requester',
'x-goog-meta-meta': 'something very meta',
'x-goog-meta-bits': '0',
'x-goog-meta-blksize': '2097152',
'x-goog-meta-compress': '0',
'authorization': 'GOOG1 accessKey1:V8g5UJUFmMzruMqUHVT6ZwvUw+M=',
'host': 's3.amazonaws.com:80',
'connection': 'Keep-Alive',
'user-agent': 'Cyberduck/4.7.2.18004 (Mac OS X/10.10.5) (x86_64)',
};
const canonicalizedHeader = getCanonicalizedGcpHeaders(headers);
assert.strictEqual(canonicalizedHeader,
'x-goog-meta-bits:0\n' +
() => {
const headers = {
'date': 'Mon, 21 Sep 2015 22:29:27 GMT',
'x-goog-request-payer': 'requester',
'x-goog-meta-meta': 'something very meta',
'x-goog-meta-bits': '0',
'x-goog-meta-blksize': '2097152',
'x-goog-meta-compress': '0',
'authorization': 'GOOG1 accessKey1:V8g5UJUFmMzruMqUHVT6ZwvUw+M=',
'host': 's3.amazonaws.com:80',
'connection': 'Keep-Alive',
'user-agent': 'Cyberduck/4.7.2.18004 (Mac OS X/10.10.5) (x86_64)',
};
const canonicalizedHeader = getCanonicalizedGcpHeaders(headers);
assert.strictEqual(canonicalizedHeader,
'x-goog-meta-bits:0\n' +
'x-goog-meta-blksize:2097152\n' +
'x-goog-meta-compress:0\n' +
'x-goog-meta-meta:something very meta\n' +
'x-goog-request-payer:requester\n');
});
});
it('should return an empty string as the canonicalized ' +
'header if no goog headers', () => {
@@ -140,7 +140,7 @@ describe('canonicalization', () => {
};
const canonicalizedResource = gcpCanonicalizedResource(request);
assert.strictEqual(canonicalizedResource,
'/bucket/obj?billing=yes,please');
'/bucket/obj?billing=yes,please');
});
it('should return the path as the canonicalized resource ' +
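
The expected strings in these tests pin down the canonicalization contract: provider-specific headers (`x-amz-*` for AWS, `x-goog-*` for GCP) are selected, sorted by name, and serialized one per line, while generic headers (`date`, `host`, `user-agent`, ...) are dropped. A behavioural sketch that reproduces the assertions above; it is not the library's implementation:

```js
// Reproduces the AWS expectation from the test when fed its headers object;
// pass 'x-goog-' to mimic the GCP variant.
function canonicalizeHeaders(headers, prefix = 'x-amz-') {
    return Object.keys(headers)
        .filter(name => name.toLowerCase().startsWith(prefix))
        .sort()
        .map(name => `${name.toLowerCase()}:${headers[name]}\n`)
        .join('');
}
```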

@@ -33,7 +33,7 @@ describe('Public Access', () => {
auth(request, logger, (err, authInfo) => {
assert.strictEqual(err, null);
assert.strictEqual(authInfo.getCanonicalID(),
publicAuthInfo.getCanonicalID());
publicAuthInfo.getCanonicalID());
done();
}, 's3', requestContext);
});

Some files were not shown because too many files have changed in this diff.