Compare commits


No commits in common. "0509e8ecd3fac537a41744b103db644c3bb5d907" and "310834c237da624800ea6b4e291045808aa9bf80" have entirely different histories.

162 changed files with 3431 additions and 11285 deletions

View File

@@ -39,14 +39,10 @@ jobs:
       run: yarn --silent lint -- --max-warnings 0
     - name: lint markdown
       run: yarn --silent lint_md
-    - name: add hostname
-      run: |
-        sudo sh -c "echo '127.0.0.1 testrequestbucket.localhost' >> /etc/hosts"
-    - name: test and coverage
-      run: yarn --silent coverage
+    - name: run unit tests
+      run: yarn test
     - name: run functional tests
      run: yarn ft_test
-    - uses: codecov/codecov-action@v2
     - name: run executables tests
       run: yarn install && yarn test
       working-directory: 'lib/executables/pensieveCreds/'

View File

@ -1,7 +1,5 @@
# Arsenal # Arsenal
[![codecov](https://codecov.io/gh/scality/Arsenal/branch/development/8.1/graph/badge.svg?token=X0esXhJSwb)](https://codecov.io/gh/scality/Arsenal)
Common utilities for the S3 project components Common utilities for the S3 project components
Within this repository, you will be able to find the shared libraries for the Within this repository, you will be able to find the shared libraries for the

View File

@@ -85,66 +85,6 @@ Used to store the bucket lifecycle configuration info
 ### Properties Added
 
-```javascript
-this._uid = uid || uuid();
-```
-
-### Usage
-
-Used to set a unique identifier on a bucket
-
-## Model version 8
-
-### Properties Added
-
-```javascript
-this._readLocationConstraint = readLocationConstraint || null;
-```
-
-### Usage
-
-Used to store default read location of the bucket
-
-## Model version 9
-
-### Properties Added
-
-```javascript
-this._isNFS = isNFS || null;
-```
-
-### Usage
-
-Used to determine whether the bucket may be accessed through NFS
-
-## Model version 10
-
-### Properties Added
-
-```javascript
-this._ingestion = ingestionConfig || null;
-```
-
-### Usage
-
-Used to store the ingestion status of a bucket
-
-## Model version 11
-
-### Properties Added
-
-```javascript
-this._azureInfo = azureInfo || null;
-```
-
-### Usage
-
-Used to store Azure storage account specific information
-
-## Model version 12
-
-### Properties Added
-
 ```javascript
 this._objectLockEnabled = objectLockEnabled || false;
 this._objectLockConfiguration = objectLockConfiguration || null;
@@ -155,7 +95,7 @@ this._objectLockConfiguration = objectLockConfiguration || null;
 ### Usage
 
 Used to determine whether object lock capabilities are enabled on a bucket and
 to store the object lock configuration of the bucket
 
-## Model version 13
+## Model version 8
 
 ### Properties Added
@@ -167,7 +107,7 @@ this._notificationConfiguration = notificationConfiguration || null;
 ### Usage
 
 Used to store the bucket notification configuration info
 
-## Model version 14
+## Model version 9
 
 ### Properties Added
@@ -178,3 +118,15 @@ this._serverSideEncryption.configuredMasterKeyId = configuredMasterKeyId || unde
 ### Usage
 
 Used to store the users configured KMS key id
+
+## Model version 10
+
+### Properties Added
+
+```javascript
+this._uid = uid || uuid();
+```
+
+### Usage
+
+Used to set a unique identifier on a bucket

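Note: the pattern documented above is easiest to see end to end in a stripped-down constructor. The class below is a hypothetical sketch (not Arsenal's real `BucketInfo`), showing why each version's added property needs a default: metadata serialized before the property existed must still load.

```javascript
// Hypothetical sketch of the documented pattern: each model version adds a
// property with a fallback so metadata serialized by an older version still
// deserializes (the real defaults come from Arsenal's BucketInfo constructor).
class BucketInfoSketch {
    constructor(objectLockEnabled, objectLockConfiguration) {
        this._objectLockEnabled = objectLockEnabled || false;
        this._objectLockConfiguration = objectLockConfiguration || null;
    }
}

// Metadata written before object lock existed yields the defaults:
const fromOldMetadata = new BucketInfoSketch(undefined, undefined);
console.log(fromOldMetadata._objectLockEnabled); // false
```
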
View File

@@ -26,7 +26,7 @@
     },
     "BucketAlreadyOwnedByYou": {
         "code": 409,
-        "description": "A bucket with this name exists and is already owned by you"
+        "description": "Your previous request to create the named bucket succeeded and you already own it. You get this error in all AWS regions except US Standard, us-east-1. In us-east-1 region, you will get 200 OK, but it is no-op (if bucket exists S3 will not do anything)."
     },
     "BucketNotEmpty": {
         "code": 409,
@@ -403,10 +403,6 @@
         "code": 409,
         "description": "The request was rejected because it attempted to create a resource that already exists."
     },
-    "KeyAlreadyExists": {
-        "code": 409,
-        "description": "The request was rejected because it attempted to create a resource that already exists."
-    },
     "ServiceFailure": {
         "code": 500,
         "description": "Server error: the request processing has failed because of an unknown error, exception or failure."
@@ -764,10 +760,5 @@
     "ReadOnly": {
         "description": "trying to write to read only back-end",
         "code": 403
-    },
-    "_comment": "----------------------- authbackend -----------------------",
-    "AuthMethodNotImplemented": {
-        "description": "AuthMethodNotImplemented",
-        "code": 501
     }
 }

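Note: these JSON entries become the error objects built by `errorsGen()` (see the errors-generator hunk near the end of this compare), so removing an entry removes the corresponding `errors.<Name>` export. A minimal usage sketch, assuming the package is consumed under its npm name `arsenal`:

```javascript
// Illustrative only: errors are exposed as properties generated from the JSON.
const { errors } = require('arsenal');

console.log(errors.BucketNotEmpty.code);        // 409
console.log(errors.ServiceFailure.description); // 'Server error: ...'

// On the right-hand commit, errors.KeyAlreadyExists and
// errors.AuthMethodNotImplemented no longer exist.
```
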
View File

@@ -1,28 +0,0 @@
-{
-    "groups": {
-        "default": {
-            "packages": [
-                "lib/executables/pensieveCreds/package.json",
-                "package.json"
-            ]
-        }
-    },
-    "branchPrefix": "improvement/greenkeeper.io/",
-    "commitMessages": {
-        "initialBadge": "docs(readme): add Greenkeeper badge",
-        "initialDependencies": "chore(package): update dependencies",
-        "initialBranches": "chore(bert-e): whitelist greenkeeper branches",
-        "dependencyUpdate": "fix(package): update ${dependency} to version ${version}",
-        "devDependencyUpdate": "chore(package): update ${dependency} to version ${version}",
-        "dependencyPin": "fix: pin ${dependency} to ${oldVersionResolved}",
-        "devDependencyPin": "chore: pin ${dependency} to ${oldVersionResolved}",
-        "closes": "\n\nCloses #${number}"
-    },
-    "ignore": [
-        "ajv",
-        "eslint",
-        "eslint-plugin-react",
-        "eslint-config-airbnb",
-        "eslint-config-scality"
-    ]
-}

index.js
View File

@@ -1,202 +0,0 @@
module.exports = {
auth: require('./lib/auth/auth'),
constants: require('./lib/constants'),
db: require('./lib/db'),
errors: require('./lib/errors.js'),
errorUtils: require('./lib/errorUtils'),
shuffle: require('./lib/shuffle'),
stringHash: require('./lib/stringHash'),
ipCheck: require('./lib/ipCheck'),
jsutil: require('./lib/jsutil'),
https: {
ciphers: require('./lib/https/ciphers.js'),
dhparam: require('./lib/https/dh2048.js'),
},
algorithms: {
list: require('./lib/algos/list/exportAlgos'),
listTools: {
DelimiterTools: require('./lib/algos/list/tools'),
Skip: require('./lib/algos/list/skip'),
},
cache: {
LRUCache: require('./lib/algos/cache/LRUCache'),
},
stream: {
MergeStream: require('./lib/algos/stream/MergeStream'),
},
SortedSet: require('./lib/algos/set/SortedSet'),
},
policies: {
evaluators: require('./lib/policyEvaluator/evaluator.js'),
validateUserPolicy: require('./lib/policy/policyValidator')
.validateUserPolicy,
evaluatePrincipal: require('./lib/policyEvaluator/principal'),
RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
requestUtils: require('./lib/policyEvaluator/requestUtils'),
actionMaps: require('./lib/policyEvaluator/utils/actionMaps'),
},
Clustering: require('./lib/Clustering'),
testing: {
matrix: require('./lib/testing/matrix.js'),
},
versioning: {
VersioningConstants: require('./lib/versioning/constants.js')
.VersioningConstants,
Version: require('./lib/versioning/Version.js').Version,
VersionID: require('./lib/versioning/VersionID.js'),
WriteGatheringManager: require('./lib/versioning/WriteGatheringManager.js'),
WriteCache: require('./lib/versioning/WriteCache.js'),
VersioningRequestProcessor: require('./lib/versioning/VersioningRequestProcessor.js'),
},
network: {
http: {
server: require('./lib/network/http/server'),
utils: require('./lib/network/http/utils'),
},
rpc: require('./lib/network/rpc/rpc'),
level: require('./lib/network/rpc/level-net'),
rest: {
RESTServer: require('./lib/network/rest/RESTServer'),
RESTClient: require('./lib/network/rest/RESTClient'),
},
RoundRobin: require('./lib/network/RoundRobin'),
probe: {
ProbeServer: require('./lib/network/probe/ProbeServer'),
HealthProbeServer:
require('./lib/network/probe/HealthProbeServer.js'),
Utils: require('./lib/network/probe/Utils.js'),
},
kmip: require('./lib/network/kmip'),
kmipClient: require('./lib/network/kmip/Client'),
},
s3routes: {
routes: require('./lib/s3routes/routes'),
routesUtils: require('./lib/s3routes/routesUtils'),
},
s3middleware: {
userMetadata: require('./lib/s3middleware/userMetadata'),
convertToXml: require('./lib/s3middleware/convertToXml'),
escapeForXml: require('./lib/s3middleware/escapeForXml'),
objectLegalHold: require('./lib/s3middleware/objectLegalHold'),
tagging: require('./lib/s3middleware/tagging'),
checkDateModifiedHeaders:
require('./lib/s3middleware/validateConditionalHeaders')
.checkDateModifiedHeaders,
validateConditionalHeaders:
require('./lib/s3middleware/validateConditionalHeaders')
.validateConditionalHeaders,
MD5Sum: require('./lib/s3middleware/MD5Sum'),
NullStream: require('./lib/s3middleware/nullStream'),
objectUtils: require('./lib/s3middleware/objectUtils'),
azureHelper: {
mpuUtils:
require('./lib/s3middleware/azureHelpers/mpuUtils'),
ResultsCollector:
require('./lib/s3middleware/azureHelpers/ResultsCollector'),
SubStreamInterface:
require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
},
prepareStream: require('./lib/s3middleware/prepareStream'),
processMpuParts: require('./lib/s3middleware/processMpuParts'),
retention: require('./lib/s3middleware/objectRetention'),
lifecycleHelpers: require('./lib/s3middleware/lifecycleHelpers'),
},
storage: {
metadata: {
MetadataWrapper: require('./lib/storage/metadata/MetadataWrapper'),
bucketclient: {
BucketClientInterface:
require('./lib/storage/metadata/bucketclient/' +
'BucketClientInterface'),
LogConsumer:
require('./lib/storage/metadata/bucketclient/LogConsumer'),
},
file: {
BucketFileInterface:
require('./lib/storage/metadata/file/BucketFileInterface'),
MetadataFileServer:
require('./lib/storage/metadata/file/MetadataFileServer'),
MetadataFileClient:
require('./lib/storage/metadata/file/MetadataFileClient'),
},
inMemory: {
metastore:
require('./lib/storage/metadata/in_memory/metastore'),
metadata: require('./lib/storage/metadata/in_memory/metadata'),
bucketUtilities:
require('./lib/storage/metadata/in_memory/bucket_utilities'),
},
mongoclient: {
MongoClientInterface:
require('./lib/storage/metadata/mongoclient/' +
'MongoClientInterface'),
LogConsumer:
require('./lib/storage/metadata/mongoclient/LogConsumer'),
},
proxy: {
Server: require('./lib/storage/metadata/proxy/Server'),
},
},
data: {
DataWrapper: require('./lib/storage/data/DataWrapper'),
MultipleBackendGateway:
require('./lib/storage/data/MultipleBackendGateway'),
parseLC: require('./lib/storage/data/LocationConstraintParser'),
file: {
DataFileStore:
require('./lib/storage/data/file/DataFileStore'),
DataFileInterface:
require('./lib/storage/data/file/DataFileInterface'),
},
external: {
AwsClient: require('./lib/storage/data/external/AwsClient'),
AzureClient: require('./lib/storage/data/external/AzureClient'),
GcpClient: require('./lib/storage/data/external/GcpClient'),
GCP: require('./lib/storage/data/external/GCP/GcpService'),
GcpUtils: require('./lib/storage/data/external/GCP/GcpUtils'),
GcpSigner: require('./lib/storage/data/external/GCP/GcpSigner'),
PfsClient: require('./lib/storage/data/external/PfsClient'),
backendUtils: require('./lib/storage/data/external/utils'),
},
inMemory: {
datastore: require('./lib/storage/data/in_memory/datastore'),
},
},
utils: require('./lib/storage/utils'),
},
models: {
BackendInfo: require('./lib/models/BackendInfo'),
BucketInfo: require('./lib/models/BucketInfo'),
BucketAzureInfo: require('./lib/models/BucketAzureInfo'),
ObjectMD: require('./lib/models/ObjectMD'),
ObjectMDLocation: require('./lib/models/ObjectMDLocation'),
ObjectMDAzureInfo: require('./lib/models/ObjectMDAzureInfo'),
ARN: require('./lib/models/ARN'),
WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
ReplicationConfiguration:
require('./lib/models/ReplicationConfiguration'),
LifecycleConfiguration:
require('./lib/models/LifecycleConfiguration'),
LifecycleRule: require('./lib/models/LifecycleRule'),
BucketPolicy: require('./lib/models/BucketPolicy'),
ObjectLockConfiguration:
require('./lib/models/ObjectLockConfiguration'),
NotificationConfiguration:
require('./lib/models/NotificationConfiguration'),
},
metrics: {
StatsClient: require('./lib/metrics/StatsClient'),
StatsModel: require('./lib/metrics/StatsModel'),
RedisClient: require('./lib/metrics/RedisClient'),
ZenkoMetrics: require('./lib/metrics/ZenkoMetrics'),
},
pensieve: {
credentialUtils: require('./lib/executables/pensieveCreds/utils'),
},
stream: {
readJSONStreamObject: require('./lib/stream/readJSONStreamObject'),
},
patches: {
locationConstraints: require('./lib/patches/locationConstraints'),
},
};

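Note: this removed file was the CommonJS entry point; the `index.ts` diffed next carries much the same export map as named exports, so most consumers are unaffected at require time. An illustrative sketch (assuming the built package keeps exposing the same names):

```javascript
// Resolvable through both the removed index.js and the remaining index.ts:
const { errors, shuffle, jsutil } = require('arsenal');

// Entries present only in the removed file (e.g. errorUtils, patches,
// network.http.utils) disappear or move, per the index.ts hunks below.
```
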
View File

@@ -2,7 +2,6 @@ export const auth = require('./lib/auth/auth');
 export const constants = require('./lib/constants');
 export const db = require('./lib/db');
 export const errors = require('./lib/errors.js');
-export const errorUtils = require('./lib/errorUtils');
 export const shuffle = require('./lib/shuffle');
 export const stringHash = require('./lib/stringHash');
 export const ipCheck = require('./lib/ipCheck');
@@ -15,10 +14,15 @@ export const https = {
 };
 
 export const algorithms = {
-    list: require('./lib/algos/list/exportAlgos'),
+    list: {
+        Basic: require('./lib/algos/list/basic').List,
+        Delimiter: require('./lib/algos/list/delimiter').Delimiter,
+        DelimiterVersions: require('./lib/algos/list/delimiterVersions').DelimiterVersions,
+        DelimiterMaster: require('./lib/algos/list/delimiterMaster').DelimiterMaster,
+        MPU: require('./lib/algos/list/MPU').MultipartUploads,
+    },
     listTools: {
         DelimiterTools: require('./lib/algos/list/tools'),
-        Skip: require('./lib/algos/list/skip'),
     },
     cache: {
         LRUCache: require('./lib/algos/cache/LRUCache'),
@@ -54,7 +58,6 @@ export const versioning = {
 export const network = {
     http: {
         server: require('./lib/network/http/server'),
-        utils: require('./lib/network/http/utils'),
     },
     rpc: require('./lib/network/rpc/rpc'),
     level: require('./lib/network/rpc/level-net'),
@@ -62,13 +65,10 @@ export const network = {
         RESTServer: require('./lib/network/rest/RESTServer'),
         RESTClient: require('./lib/network/rest/RESTClient'),
     },
-    RoundRobin: require('./lib/network/RoundRobin'),
     probe: {
         ProbeServer: require('./lib/network/probe/ProbeServer'),
-        HealthProbeServer:
-            require('./lib/network/probe/HealthProbeServer.js'),
-        Utils: require('./lib/network/probe/Utils.js'),
     },
+    RoundRobin: require('./lib/network/RoundRobin'),
     kmip: require('./lib/network/kmip'),
     kmipClient: require('./lib/network/kmip/Client'),
 };
@@ -84,24 +84,16 @@ export const s3middleware = {
     escapeForXml: require('./lib/s3middleware/escapeForXml'),
     objectLegalHold: require('./lib/s3middleware/objectLegalHold'),
     tagging: require('./lib/s3middleware/tagging'),
-    checkDateModifiedHeaders:
-        require('./lib/s3middleware/validateConditionalHeaders')
-            .checkDateModifiedHeaders,
     validateConditionalHeaders:
-        require('./lib/s3middleware/validateConditionalHeaders')
-            .validateConditionalHeaders,
+        require('./lib/s3middleware/validateConditionalHeaders').validateConditionalHeaders,
     MD5Sum: require('./lib/s3middleware/MD5Sum'),
     NullStream: require('./lib/s3middleware/nullStream'),
     objectUtils: require('./lib/s3middleware/objectUtils'),
     azureHelper: {
-        mpuUtils:
-            require('./lib/s3middleware/azureHelpers/mpuUtils'),
-        ResultsCollector:
-            require('./lib/s3middleware/azureHelpers/ResultsCollector'),
-        SubStreamInterface:
-            require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
+        mpuUtils: require('./lib/s3middleware/azureHelpers/mpuUtils'),
+        ResultsCollector: require('./lib/s3middleware/azureHelpers/ResultsCollector'),
+        SubStreamInterface: require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
     },
-    prepareStream: require('./lib/s3middleware/prepareStream'),
     processMpuParts: require('./lib/s3middleware/processMpuParts'),
     retention: require('./lib/s3middleware/objectRetention'),
     lifecycleHelpers: require('./lib/s3middleware/lifecycleHelpers'),
@@ -172,24 +164,17 @@ export const storage = {
 };
 
 export const models = {
-    BackendInfo: require('./lib/models/BackendInfo'),
     BucketInfo: require('./lib/models/BucketInfo'),
-    BucketAzureInfo: require('./lib/models/BucketAzureInfo'),
     ObjectMD: require('./lib/models/ObjectMD'),
     ObjectMDLocation: require('./lib/models/ObjectMDLocation'),
-    ObjectMDAzureInfo: require('./lib/models/ObjectMDAzureInfo'),
     ARN: require('./lib/models/ARN'),
     WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
-    ReplicationConfiguration:
-        require('./lib/models/ReplicationConfiguration'),
-    LifecycleConfiguration:
-        require('./lib/models/LifecycleConfiguration'),
+    ReplicationConfiguration: require('./lib/models/ReplicationConfiguration'),
+    LifecycleConfiguration: require('./lib/models/LifecycleConfiguration'),
     LifecycleRule: require('./lib/models/LifecycleRule'),
     BucketPolicy: require('./lib/models/BucketPolicy'),
-    ObjectLockConfiguration:
-        require('./lib/models/ObjectLockConfiguration'),
-    NotificationConfiguration:
-        require('./lib/models/NotificationConfiguration'),
+    ObjectLockConfiguration: require('./lib/models/ObjectLockConfiguration'),
+    NotificationConfiguration: require('./lib/models/NotificationConfiguration'),
 };
 
 export const metrics = {
@@ -206,7 +191,3 @@ export const pensieve = {
 export const stream = {
     readJSONStreamObject: require('./lib/stream/readJSONStreamObject'),
 };
-
-export const patches = {
-    locationConstraints: require('./lib/patches/locationConstraints'),
-};

View File

@@ -91,7 +91,7 @@ class Vault {
                 requestContext: serializedRCsArr,
             },
             (err, userInfo) => vaultSignatureCb(err, userInfo,
-                params.log, callback),
+                params.log, callback)
         );
     }
 
@@ -146,7 +146,7 @@ class Vault {
                 requestContext: serializedRCs,
             },
             (err, userInfo) => vaultSignatureCb(err, userInfo,
-                params.log, callback, streamingV4Params),
+                params.log, callback, streamingV4Params)
         );
     }
 
@@ -232,28 +232,28 @@ class Vault {
      */
     getAccountIds(canonicalIDs, log, callback) {
         log.trace('getting accountIds from Vault based on canonicalIDs',
-            { canonicalIDs });
+                  { canonicalIDs });
         this.client.getAccountIds(canonicalIDs,
-            { reqUid: log.getSerializedUids() },
-            (err, info) => {
-                if (err) {
-                    log.debug('received error message from vault',
-                        { errorMessage: err });
-                    return callback(err);
-                }
-                const infoFromVault = info.message.body;
-                log.trace('info received from vault', { infoFromVault });
-                const result = {};
-                /* If the accountId was not found in Vault, do not
-                send the canonicalID back to the API */
-                Object.keys(infoFromVault).forEach(key => {
-                    if (infoFromVault[key] !== 'NotFound' &&
-                    infoFromVault[key] !== 'WrongFormat') {
-                        result[key] = infoFromVault[key];
-                    }
-                });
-                return callback(null, result);
-            });
+                                  { reqUid: log.getSerializedUids() },
+                                  (err, info) => {
+                                      if (err) {
+                                          log.debug('received error message from vault',
+                                                    { errorMessage: err });
+                                          return callback(err);
+                                      }
+                                      const infoFromVault = info.message.body;
+                                      log.trace('info received from vault', { infoFromVault });
+                                      const result = {};
+                                      /* If the accountId was not found in Vault, do not
+                                      send the canonicalID back to the API */
+                                      Object.keys(infoFromVault).forEach(key => {
+                                          if (infoFromVault[key] !== 'NotFound' &&
+                                          infoFromVault[key] !== 'WrongFormat') {
+                                              result[key] = infoFromVault[key];
+                                          }
+                                      });
+                                      return callback(null, result);
+                                  });
     }
 
     /** checkPolicies -- call Vault to evaluate policies

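Note: the `getAccountIds` hunk is largely an indentation reflow, but the filtering logic it moves around is worth seeing in isolation. A standalone sketch of that logic, with a hypothetical Vault response:

```javascript
// Entries Vault reports as 'NotFound' or 'WrongFormat' are dropped before
// the result reaches the API (logic copied from the hunk above).
const infoFromVault = {
    'canonical-id-1': '123456789012',
    'canonical-id-2': 'NotFound',
    'canonical-id-3': 'WrongFormat',
};
const result = {};
Object.keys(infoFromVault).forEach(key => {
    if (infoFromVault[key] !== 'NotFound' &&
        infoFromVault[key] !== 'WrongFormat') {
        result[key] = infoFromVault[key];
    }
});
console.log(result); // { 'canonical-id-1': '123456789012' }
```
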
View File

@@ -10,13 +10,11 @@ const constants = require('../constants');
 const constructStringToSignV2 = require('./v2/constructStringToSign');
 const constructStringToSignV4 = require('./v4/constructStringToSign');
 const convertUTCtoISO8601 = require('./v4/timeUtils').convertUTCtoISO8601;
-const vaultUtilities = require('./backends/in_memory/vaultUtilities');
-const inMemoryBackend = require('./backends/in_memory/Backend');
-const validateAuthConfig = require('./backends/in_memory/validateAuthConfig');
-const AuthLoader = require('./backends/in_memory/AuthLoader');
+const vaultUtilities = require('./in_memory/vaultUtilities');
+const backend = require('./in_memory/Backend');
+const validateAuthConfig = require('./in_memory/validateAuthConfig');
+const AuthLoader = require('./in_memory/AuthLoader');
 const Vault = require('./Vault');
-const baseBackend = require('./backends/base');
-const chainBackend = require('./backends/ChainBackend');
 
 let vault = null;
 const auth = {};
@@ -74,7 +72,7 @@ function extractParams(request, log, awsService, data) {
             version = 'v4';
         } else {
             log.trace('invalid authorization security header',
-                { header: authHeader });
+                      { header: authHeader });
             return { err: errors.AccessDenied };
         }
     } else if (data.Signature) {
@@ -89,7 +87,7 @@ function extractParams(request, log, awsService, data) {
     if (version !== null && method !== null) {
         if (!checkFunctions[version] || !checkFunctions[version][method]) {
             log.trace('invalid auth version or method',
-                { version, authMethod: method });
+                      { version, authMethod: method });
             return { err: errors.NotImplemented };
         }
         log.trace('identified auth method', { version, authMethod: method });
@@ -161,7 +159,7 @@ function doAuth(request, log, cb, awsService, requestContexts) {
 * @return {undefined}
 */
 function generateV4Headers(request, data, accessKey, secretKeyValue,
-    awsService, proxyPath, sessionToken) {
+                           awsService, proxyPath, sessionToken) {
     Object.assign(request, { headers: {} });
     const amzDate = convertUTCtoISO8601(Date.now());
     // get date without time
@@ -194,16 +192,16 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
         .filter(headerName =>
             headerName.startsWith('x-amz-')
             || headerName.startsWith('x-scal-')
-            || headerName === 'host',
+            || headerName === 'host'
         ).sort().join(';');
     const params = { request, signedHeaders, payloadChecksum,
         credentialScope, timestamp, query: data,
         awsService: service, proxyPath };
     const stringToSign = constructStringToSignV4(params);
     const signingKey = vaultUtilities.calculateSigningKey(secretKeyValue,
-        region,
-        scopeDate,
-        service);
+                                                          region,
+                                                          scopeDate,
+                                                          service);
     const signature = crypto.createHmac('sha256', signingKey)
         .update(stringToSign, 'binary').digest('hex');
     const authorizationHeader = `${algorithm} Credential=${accessKey}` +
@@ -224,14 +222,10 @@ module.exports = {
         constructStringToSignV2,
     },
     inMemory: {
-        backend: inMemoryBackend,
+        backend,
         validateAuthConfig,
         AuthLoader,
     },
-    backends: {
-        baseBackend,
-        chainBackend,
-    },
     AuthInfo,
     Vault,
 };

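Note: the last hunk changes the public shape of the auth module. A sketch of what consumers see on each side (paths as used inside Arsenal; illustrative only):

```javascript
const auth = require('./lib/auth/auth');

// Available on both sides of this compare:
const { backend, validateAuthConfig, AuthLoader } = auth.inMemory;

// Left-hand commit only (removed on the right):
// const { baseBackend, chainBackend } = auth.backends;
```
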
View File

@@ -1,189 +0,0 @@
'use strict'; // eslint-disable-line strict
const assert = require('assert');
const async = require('async');
const errors = require('../../errors');
const BaseBackend = require('./base');
/**
* Class that provides an authentication backend that will verify signatures
* and retrieve emails and canonical ids associated with an account using a
* given list of authentication backends and vault clients.
*
* @class ChainBackend
*/
class ChainBackend extends BaseBackend {
/**
* @constructor
* @param {string} service - service id
* @param {object[]} clients - list of authentication backends or vault clients
*/
constructor(service, clients) {
super(service);
assert(Array.isArray(clients) && clients.length > 0, 'invalid client list');
assert(clients.every(client =>
typeof client.verifySignatureV4 === 'function' &&
typeof client.verifySignatureV2 === 'function' &&
typeof client.getCanonicalIds === 'function' &&
typeof client.getEmailAddresses === 'function' &&
typeof client.checkPolicies === 'function' &&
typeof client.healthcheck === 'function',
), 'invalid client: missing required auth backend methods');
this._clients = clients;
}
/*
* try task against each client for one to be successful
*/
_tryEachClient(task, cb) {
async.tryEach(this._clients.map(client => done => task(client, done)), cb);
}
/*
* apply task to all clients
*/
_forEachClient(task, cb) {
async.map(this._clients, task, cb);
}
verifySignatureV2(stringToSign, signatureFromRequest, accessKey, options, callback) {
this._tryEachClient((client, done) => client.verifySignatureV2(
stringToSign,
signatureFromRequest,
accessKey,
options,
done,
), callback);
}
verifySignatureV4(stringToSign, signatureFromRequest, accessKey, region, scopeDate, options, callback) {
this._tryEachClient((client, done) => client.verifySignatureV4(
stringToSign,
signatureFromRequest,
accessKey,
region,
scopeDate,
options,
done,
), callback);
}
static _mergeObjects(objectResponses) {
return objectResponses.reduce(
(retObj, resObj) => Object.assign(retObj, resObj.message.body),
{});
}
getCanonicalIds(emailAddresses, options, callback) {
this._forEachClient(
(client, done) => client.getCanonicalIds(emailAddresses, options, done),
(err, res) => {
if (err) {
return callback(err);
}
// TODO: atm naive merge, better handling of conflicting email results
return callback(null, {
message: {
body: ChainBackend._mergeObjects(res),
},
});
});
}
getEmailAddresses(canonicalIDs, options, callback) {
this._forEachClient(
(client, done) => client.getEmailAddresses(canonicalIDs, options, done),
(err, res) => {
if (err) {
return callback(err);
}
return callback(null, {
message: {
body: ChainBackend._mergeObjects(res),
},
});
});
}
/*
* merge policy responses into a single message
*/
static _mergePolicies(policyResponses) {
const policyMap = {};
policyResponses.forEach(resp => {
if (!resp.message || !Array.isArray(resp.message.body)) {
return;
}
resp.message.body.forEach(policy => {
const key = (policy.arn || '') + (policy.versionId || '');
if (!policyMap[key] || !policyMap[key].isAllowed) {
policyMap[key] = policy;
}
// else is duplicate policy
});
});
return Object.keys(policyMap).map(key => {
const policyRes = { isAllowed: policyMap[key].isAllowed };
if (policyMap[key].arn !== '') {
policyRes.arn = policyMap[key].arn;
}
if (policyMap[key].versionId) {
policyRes.versionId = policyMap[key].versionId;
}
return policyRes;
});
}
/*
response format:
{ message: {
body: [{}],
code: number,
message: string,
} }
*/
checkPolicies(requestContextParams, userArn, options, callback) {
this._forEachClient((client, done) => client.checkPolicies(
requestContextParams,
userArn,
options,
done,
), (err, res) => {
if (err) {
return callback(err);
}
return callback(null, {
message: {
body: ChainBackend._mergePolicies(res),
},
});
});
}
healthcheck(reqUid, callback) {
this._forEachClient((client, done) =>
client.healthcheck(reqUid, (err, res) => done(null, {
error: !!err ? err : null,
status: res,
}),
), (err, res) => {
if (err) {
return callback(err);
}
const isError = res.some(results => !!results.error);
if (isError) {
return callback(errors.InternalError, res);
}
return callback(null, res);
});
}
}
module.exports = ChainBackend;

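Note: to see what the removed class was for, here is a hypothetical composition (stub clients, not real Vault clients) satisfying the duck-typed interface its constructor asserts:

```javascript
// Stub satisfying the method list ChainBackend's constructor checks for.
const stubClient = ok => ({
    verifySignatureV2: (str, sig, key, opts, cb) =>
        (ok ? cb(null, { message: { body: { accessKey: key } } })
            : cb(new Error('bad signature'))),
    verifySignatureV4: (str, sig, key, region, date, opts, cb) =>
        (ok ? cb(null, { message: { body: { accessKey: key } } })
            : cb(new Error('bad signature'))),
    getCanonicalIds: (emails, opts, cb) => cb(null, { message: { body: {} } }),
    getEmailAddresses: (ids, opts, cb) => cb(null, { message: { body: {} } }),
    checkPolicies: (params, arn, opts, cb) => cb(null, { message: { body: [] } }),
    healthcheck: (reqUid, cb) => cb(null, { code: 200, message: 'OK' }),
});

// const chain = new ChainBackend('s3', [stubClient(false), stubClient(true)]);
// Signature checks try each client in order (async.tryEach), so the failing
// stub falls through to the succeeding one; lookups fan out to all clients.
```
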
View File

@@ -1,86 +0,0 @@
'use strict'; // eslint-disable-line strict
const errors = require('../../errors');
/**
* Base backend class
*
* @class BaseBackend
*/
class BaseBackend {
/**
* @constructor
* @param {string} service - service identifer for construction arn
*/
constructor(service) {
this.service = service;
}
/** verifySignatureV2
* @param {string} stringToSign - string to sign built per AWS rules
* @param {string} signatureFromRequest - signature sent with request
* @param {string} accessKey - account accessKey
* @param {object} options - contains algorithm (SHA1 or SHA256)
* @param {function} callback - callback with either error or user info
* @return {function} calls callback
*/
verifySignatureV2(stringToSign, signatureFromRequest,
accessKey, options, callback) {
return callback(errors.AuthMethodNotImplemented);
}
/** verifySignatureV4
* @param {string} stringToSign - string to sign built per AWS rules
* @param {string} signatureFromRequest - signature sent with request
* @param {string} accessKey - account accessKey
* @param {string} region - region specified in request credential
* @param {string} scopeDate - date specified in request credential
* @param {object} options - options to send to Vault
* (just contains reqUid for logging in Vault)
* @param {function} callback - callback with either error or user info
* @return {function} calls callback
*/
verifySignatureV4(stringToSign, signatureFromRequest, accessKey,
region, scopeDate, options, callback) {
return callback(errors.AuthMethodNotImplemented);
}
/**
* Gets canonical ID's for a list of accounts
* based on email associated with account
* @param {array} emails - list of email addresses
* @param {object} options - to send log id to vault
* @param {function} callback - callback to calling function
* @returns {function} callback with either error or
* object with email addresses as keys and canonical IDs
* as values
*/
getCanonicalIds(emails, options, callback) {
return callback(errors.AuthMethodNotImplemented);
}
/**
* Gets email addresses (referred to as diplay names for getACL's)
* for a list of accounts based on canonical IDs associated with account
* @param {array} canonicalIDs - list of canonicalIDs
* @param {object} options - to send log id to vault
* @param {function} callback - callback to calling function
* @returns {function} callback with either error or
* an object from Vault containing account canonicalID
* as each object key and an email address as the value (or "NotFound")
*/
getEmailAddresses(canonicalIDs, options, callback) {
return callback(errors.AuthMethodNotImplemented);
}
checkPolicies(requestContextParams, userArn, options, callback) {
return callback(null, { message: { body: [] } });
}
healthcheck(reqUid, callback) {
return callback(null, { code: 200, message: 'OK' });
}
}
module.exports = BaseBackend;

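Note: the removed base class mostly encodes a contract; a subclass overrides only what it supports. A self-contained sketch (the defaults are inlined here so the example runs without the deleted file):

```javascript
const assert = require('assert');

// Inlined stand-in for the removed BaseBackend's default behaviour.
class BaseBackendSketch {
    constructor(service) { this.service = service; }
    checkPolicies(requestContextParams, userArn, options, callback) {
        return callback(null, { message: { body: [] } });
    }
    healthcheck(reqUid, callback) {
        return callback(null, { code: 200, message: 'OK' });
    }
}

// A backend that adds nothing still answers healthchecks and policy checks.
class NoopBackend extends BaseBackendSketch {}
new NoopBackend('s3').healthcheck('req-1', (err, res) => {
    assert.ifError(err);
    assert.strictEqual(res.code, 200);
});
```
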
View File

@@ -3,7 +3,7 @@ const glob = require('simple-glob');
 const joi = require('@hapi/joi');
 const werelogs = require('werelogs');
 
-const ARN = require('../../../models/ARN');
+const ARN = require('../../models/ARN');
 
 /**
  * Load authentication information from files or pre-loaded account
@@ -26,20 +26,20 @@ class AuthLoader {
             .required();
         const accountsJoi = joi.array()
-            .items({
-                name: joi.string().required(),
-                email: joi.string().email().required(),
-                arn: joi.string().required(),
-                canonicalID: joi.string().required(),
-                shortid: joi.string().regex(/^[0-9]{12}$/).required(),
-                keys: this._joiKeysValidator,
-                // backward-compat
-                users: joi.array(),
-            })
-            .required()
-            .unique('arn')
-            .unique('email')
-            .unique('canonicalID');
+                  .items({
+                      name: joi.string().required(),
+                      email: joi.string().email().required(),
+                      arn: joi.string().required(),
+                      canonicalID: joi.string().required(),
+                      shortid: joi.string().regex(/^[0-9]{12}$/).required(),
+                      keys: this._joiKeysValidator,
+                      // backward-compat
+                      users: joi.array(),
+                  })
+                  .required()
+                  .unique('arn')
+                  .unique('email')
+                  .unique('canonicalID');
         this._joiValidator = joi.object({ accounts: accountsJoi });
     }
@@ -136,7 +136,7 @@ class AuthLoader {
     _validateData(authData, filePath) {
         const res = joi.validate(authData, this._joiValidator,
-            { abortEarly: false });
+                                 { abortEarly: false });
         if (res.error) {
             this._dumpJoiErrors(res.error.details, filePath);
             return false;
@@ -156,7 +156,7 @@ class AuthLoader {
                     'master/conf/authdata.json). Also note that support ' +
                     'for account users has been dropped.',
                     { accountName: account.name, accountArn: account.arn,
-                        filePath });
+                      filePath });
                 arnError = true;
                 return;
             }
@@ -167,7 +167,7 @@ class AuthLoader {
                     'https://github.com/scality/S3/blob/master/conf/' +
                     'authdata.json)',
                     { accountName: account.name, accountArn: account.arn,
-                        filePath });
+                      filePath });
                 arnError = true;
                 return;
             }
@@ -176,8 +176,8 @@ class AuthLoader {
                 this._log.error(
                     'authentication config validation error',
                     { reason: arnObj.error.description,
-                        accountName: account.name, accountArn: account.arn,
-                        filePath });
+                      accountName: account.name, accountArn: account.arn,
+                      filePath });
                 arnError = true;
                 return;
             }
@@ -185,8 +185,8 @@ class AuthLoader {
                 this._log.error(
                     'authentication config validation error',
                     { reason: 'not an IAM account ARN',
-                        accountName: account.name, accountArn: account.arn,
-                        filePath });
+                      accountName: account.name, accountArn: account.arn,
+                      filePath });
                 arnError = true;
                 return;
             }
@@ -215,7 +215,7 @@ class AuthLoader {
                 logInfo.context = err.context;
             }
             this._log.error('authentication config validation error',
-                logInfo);
+                            logInfo);
         });
     }
 }

View File

@@ -2,11 +2,10 @@
 const crypto = require('crypto');
 
-const errors = require('../../../errors');
+const errors = require('../../errors');
 const calculateSigningKey = require('./vaultUtilities').calculateSigningKey;
 const hashSignature = require('./vaultUtilities').hashSignature;
 const Indexer = require('./Indexer');
-const BaseBackend = require('../base');
 
 function _formatResponse(userInfoToSend) {
     return {
@@ -20,9 +19,9 @@ function _formatResponse(userInfoToSend) {
 * Class that provides a memory backend for verifying signatures and getting
 * emails and canonical ids associated with an account.
 *
- * @class InMemoryBackend
+ * @class Backend
 */
-class InMemoryBackend extends BaseBackend {
+class Backend {
     /**
      * @constructor
     * @param {string} service - service identifer for construction arn
@@ -31,11 +30,19 @@ class InMemoryBackend extends BaseBackend {
     * back and returns it in an object
     */
    constructor(service, indexer, formatter) {
-        super(service);
+        this.service = service;
        this.indexer = indexer;
        this.formatResponse = formatter;
    }
 
+    /** verifySignatureV2
+     * @param {string} stringToSign - string to sign built per AWS rules
+     * @param {string} signatureFromRequest - signature sent with request
+     * @param {string} accessKey - account accessKey
+     * @param {object} options - contains algorithm (SHA1 or SHA256)
+     * @param {function} callback - callback with either error or user info
+     * @return {function} calls callback
+     */
    verifySignatureV2(stringToSign, signatureFromRequest,
        accessKey, options, callback) {
        const entity = this.indexer.getEntityByKey(accessKey);
@@ -58,6 +65,18 @@ class Backend {
        return callback(null, vaultReturnObject);
    }
 
+    /** verifySignatureV4
+     * @param {string} stringToSign - string to sign built per AWS rules
+     * @param {string} signatureFromRequest - signature sent with request
+     * @param {string} accessKey - account accessKey
+     * @param {string} region - region specified in request credential
+     * @param {string} scopeDate - date specified in request credential
+     * @param {object} options - options to send to Vault
+     * (just contains reqUid for logging in Vault)
+     * @param {function} callback - callback with either error or user info
+     * @return {function} calls callback
+     */
    verifySignatureV4(stringToSign, signatureFromRequest, accessKey,
        region, scopeDate, options, callback) {
        const entity = this.indexer.getEntityByKey(accessKey);
@@ -81,6 +100,16 @@ class Backend {
        return callback(null, vaultReturnObject);
    }
 
+    /**
+     * Gets canonical ID's for a list of accounts
+     * based on email associated with account
+     * @param {array} emails - list of email addresses
+     * @param {object} log - log object
+     * @param {function} cb - callback to calling function
+     * @returns {function} callback with either error or
+     * object with email addresses as keys and canonical IDs
+     * as values
+     */
    getCanonicalIds(emails, log, cb) {
        const results = {};
        emails.forEach(email => {
@@ -101,6 +130,16 @@ class Backend {
        return cb(null, vaultReturnObject);
    }
 
+    /**
+     * Gets email addresses (referred to as diplay names for getACL's)
+     * for a list of accounts based on canonical IDs associated with account
+     * @param {array} canonicalIDs - list of canonicalIDs
+     * @param {object} options - to send log id to vault
+     * @param {function} cb - callback to calling function
+     * @returns {function} callback with either error or
+     * an object from Vault containing account canonicalID
+     * as each object key and an email address as the value (or "NotFound")
+     */
    getEmailAddresses(canonicalIDs, options, cb) {
        const results = {};
        canonicalIDs.forEach(canonicalId => {
@@ -149,7 +188,7 @@ class Backend {
    }
 
-class S3AuthBackend extends InMemoryBackend {
+class S3AuthBackend extends Backend {
    /**
     * @constructor
     * @param {object} authdata - the authentication config file's data

View File

@@ -41,7 +41,7 @@ function getCanonicalizedAmzHeaders(headers, clientType) {
     // Build headerString
     return amzHeaders.reduce((headerStr, current) =>
         `${headerStr}${current[0]}:${current[1]}\n`,
-    '');
+        '');
 }
 
 module.exports = getCanonicalizedAmzHeaders;

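Note: only the indentation of the reduce changes here; for reference, the fold itself behaves like this standalone sketch:

```javascript
// Fold [name, value] header pairs into the canonicalized header block,
// one "name:value\n" entry per header (sample headers, not from the diff).
const amzHeaders = [
    ['x-amz-acl', 'private'],
    ['x-amz-meta-color', 'blue'],
];
const headerString = amzHeaders.reduce((headerStr, current) =>
    `${headerStr}${current[0]}:${current[1]}\n`, '');
console.log(JSON.stringify(headerString));
// "x-amz-acl:private\nx-amz-meta-color:blue\n"
```
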
View File

@@ -22,9 +22,9 @@ function check(request, log, data) {
     timestamp = Date.parse(timestamp);
     if (!timestamp) {
         log.debug('missing or invalid date header',
-            { method: 'auth/v2/headerAuthCheck.check' });
+                  { method: 'auth/v2/headerAuthCheck.check' });
         return { err: errors.AccessDenied.
-            customizeDescription('Authentication requires a valid Date or ' +
-            'x-amz-date header') };
+                 customizeDescription('Authentication requires a valid Date or ' +
+                                      'x-amz-date header') };
     }

View File

@@ -42,12 +42,12 @@ function check(request, log, data) {
     if (expirationTime > currentTime + preSignedURLExpiry) {
         log.debug('expires parameter too far in future',
-            { expires: request.query.Expires });
+                  { expires: request.query.Expires });
         return { err: errors.AccessDenied };
     }
     if (currentTime > expirationTime) {
         log.debug('current time exceeds expires time',
-            { expires: request.query.Expires });
+                  { expires: request.query.Expires });
         return { err: errors.RequestTimeTooSkewed };
     }
     const accessKey = data.AWSAccessKeyId;

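Note: the two rejections reflowed above implement the pre-signed-URL validity window. A standalone sketch with sample values (the real bound comes from constants' `defaultPreSignedURLExpiry`; the millisecond conversion here is an assumption):

```javascript
const preSignedURLExpiry = 7 * 24 * 60 * 60 * 1000; // 7 days, in ms
const currentTime = Date.now();
// Expires arrives as epoch seconds in the query string; sample: 1h from now.
const expirationTime = (Math.floor(currentTime / 1000) + 3600) * 1000;

if (expirationTime > currentTime + preSignedURLExpiry) {
    console.log('AccessDenied: expires too far in the future');
} else if (currentTime > expirationTime) {
    console.log('RequestTimeTooSkewed: URL already expired');
} else {
    console.log('within the validity window');
}
```
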
View File

@@ -43,7 +43,7 @@ function awsURIencode(input, encodeSlash, noEncodeStar) {
         return encoded;
     }
     for (let i = 0; i < input.length; i++) {
-        let ch = input.charAt(i);
+        const ch = input.charAt(i);
         if ((ch >= 'A' && ch <= 'Z') ||
             (ch >= 'a' && ch <= 'z') ||
             (ch >= '0' && ch <= '9') ||
@@ -57,20 +57,6 @@ function awsURIencode(input, encodeSlash, noEncodeStar) {
         } else if (ch === '*') {
             encoded = encoded.concat(noEncodeStar ? '*' : '%2A');
         } else {
-            if (ch >= '\uD800' && ch <= '\uDBFF') {
-                // If this character is a high surrogate peek the next character
-                // and join it with this one if the next character is a low
-                // surrogate.
-                // Otherwise the encoded URI will contain the two surrogates as
-                // two distinct UTF-8 sequences which is not valid UTF-8.
-                if (i + 1 < input.length) {
-                    const ch2 = input.charAt(i + 1);
-                    if (ch2 >= '\uDC00' && ch2 <= '\uDFFF') {
-                        i++;
-                        ch += ch2;
-                    }
-                }
-            }
             encoded = encoded.concat(_toHexUTF8(ch));
         }
     }

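Note: the removed branch only matters for characters outside the Basic Multilingual Plane, which JavaScript strings store as surrogate pairs; encoding the two halves separately produces invalid UTF-8. A standalone illustration:

```javascript
const s = '\u{1F600}'; // 😀 — one code point, two UTF-16 code units
console.log(s.length); // 2

// Encoding the pair as a unit gives the valid 4-byte UTF-8 sequence:
console.log(encodeURIComponent(s)); // '%F0%9F%98%80'

// Encoding a lone surrogate half is not valid UTF-8; URIError is thrown:
try {
    encodeURIComponent(s[0]);
} catch (e) {
    console.log(e instanceof URIError); // true
}
```
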
View File

@@ -88,14 +88,14 @@ function check(request, log, data, awsService) {
     }
     if (!timestamp) {
         log.debug('missing or invalid date header',
-            { method: 'auth/v4/headerAuthCheck.check' });
+                  { method: 'auth/v4/headerAuthCheck.check' });
         return { err: errors.AccessDenied.
-            customizeDescription('Authentication requires a valid Date or ' +
-            'x-amz-date header') };
+                 customizeDescription('Authentication requires a valid Date or ' +
+                                      'x-amz-date header') };
     }
 
     const validationResult = validateCredentials(credentialsArr, timestamp,
-        log);
+                                                 log);
     if (validationResult instanceof Error) {
         log.debug('credentials in improper format', { credentialsArr,
             timestamp, validationResult });
@@ -127,17 +127,6 @@ function check(request, log, data, awsService) {
         return { err: errors.RequestTimeTooSkewed };
     }
 
-    let proxyPath = null;
-    if (request.headers.proxy_path) {
-        try {
-            proxyPath = decodeURIComponent(request.headers.proxy_path);
-        } catch (err) {
-            log.debug('invalid proxy_path header', { proxyPath, err });
-            return { err: errors.InvalidArgument.customizeDescription(
-                'invalid proxy_path header') };
-        }
-    }
-
     const stringToSign = constructStringToSign({
         log,
         request,
@@ -147,7 +136,6 @@ function check(request, log, data, awsService) {
         timestamp,
         payloadChecksum,
         awsService: service,
-        proxyPath,
     });
     log.trace('constructed stringToSign', { stringToSign });
     if (stringToSign instanceof Error) {

View File

@@ -45,7 +45,7 @@ function check(request, log, data) {
     }
 
     const validationResult = validateCredentials(credential, timestamp,
-        log);
+                                                 log);
     if (validationResult instanceof Error) {
         log.debug('credentials in improper format', { credential,
             timestamp, validationResult });
@@ -62,17 +62,6 @@ function check(request, log, data) {
         return { err: errors.RequestTimeTooSkewed };
     }
 
-    let proxyPath = null;
-    if (request.headers.proxy_path) {
-        try {
-            proxyPath = decodeURIComponent(request.headers.proxy_path);
-        } catch (err) {
-            log.debug('invalid proxy_path header', { proxyPath });
-            return { err: errors.InvalidArgument.customizeDescription(
-                'invalid proxy_path header') };
-        }
-    }
-
     // In query v4 auth, the canonical request needs
     // to include the query params OTHER THAN
     // the signature so create a
@@ -98,7 +87,6 @@ function check(request, log, data) {
         credentialScope:
             `${scopeDate}/${region}/${service}/${requestType}`,
         awsService: service,
-        proxyPath,
     });
     if (stringToSign instanceof Error) {
         return { err: stringToSign };

View File

@@ -25,20 +25,20 @@ function validateCredentials(credentials, timestamp, log) {
         log.warn('accessKey provided is wrong format', { accessKey });
         return errors.InvalidArgument;
     }
     // The scope date (format YYYYMMDD) must be same date as the timestamp
     // on the request from the x-amz-date param (if queryAuthCheck)
     // or from the x-amz-date header or date header (if headerAuthCheck)
     // Format of timestamp is ISO 8601: YYYYMMDDTHHMMSSZ.
     // http://docs.aws.amazon.com/AmazonS3/latest/API/
     // sigv4-query-string-auth.html
     // http://docs.aws.amazon.com/general/latest/gr/
     // sigv4-date-handling.html
     // convert timestamp to format of scopeDate YYYYMMDD
     const timestampDate = timestamp.split('T')[0];
     if (scopeDate.length !== 8 || scopeDate !== timestampDate) {
         log.warn('scope date must be the same date as the timestamp date',
-            { scopeDate, timestampDate });
+                 { scopeDate, timestampDate });
         return errors.RequestTimeTooSkewed;
     }
     if (service !== 's3' && service !== 'iam' && service !== 'ring' &&
@@ -50,7 +50,7 @@ function validateCredentials(credentials, timestamp, log) {
     }
     if (requestType !== 'aws4_request') {
         log.warn('requestType contained in params is not aws4_request',
-            { requestType });
+                 { requestType });
         return errors.InvalidArgument;
     }
     return {};
@@ -68,7 +68,7 @@ function extractQueryParams(queryObj, log) {
     // Do not need the algorithm sent back
     if (queryObj['X-Amz-Algorithm'] !== 'AWS4-HMAC-SHA256') {
         log.warn('algorithm param incorrect',
-            { algo: queryObj['X-Amz-Algorithm'] });
+                 { algo: queryObj['X-Amz-Algorithm'] });
         return authParams;
     }

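Note: the long comment block describes the scope-date rule; in isolation it is just a string comparison (sample values):

```javascript
// Credential scope date (YYYYMMDD) must equal the date part of the
// ISO 8601 basic-format timestamp (YYYYMMDDTHHMMSSZ).
const timestamp = '20240105T123000Z';
const scopeDate = '20240105';

const timestampDate = timestamp.split('T')[0];
console.log(scopeDate.length === 8 && scopeDate === timestampDate); // true
// A mismatch (e.g. scopeDate '20240104') yields RequestTimeTooSkewed.
```
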
View File

@ -1,21 +1,20 @@
'use strict'; // eslint-disable-line strict 'use strict'; // eslint-disable-line strict
const crypto = require('crypto');
// The min value here is to manage further backward compat if we // The min value here is to manage further backward compat if we
// need it // need it
// Default value const iamSecurityTokenSizeMin = 128;
const vaultGeneratedIamSecurityTokenSizeMin = 128; const iamSecurityTokenSizeMax = 128;
// Safe to assume that a typical token size is less than 8192 bytes // Security token is an hex string (no real format from amazon)
const vaultGeneratedIamSecurityTokenSizeMax = 8192; const iamSecurityTokenPattern =
// Base-64 new RegExp(`^[a-f0-9]{${iamSecurityTokenSizeMin},` +
const vaultGeneratedIamSecurityTokenPattern = /^[A-Za-z0-9/+=]*$/; `${iamSecurityTokenSizeMax}}$`);
module.exports = { module.exports = {
// info about the iam security token // info about the iam security token
iamSecurityToken: { iamSecurityToken: {
min: vaultGeneratedIamSecurityTokenSizeMin, min: iamSecurityTokenSizeMin,
max: vaultGeneratedIamSecurityTokenSizeMax, max: iamSecurityTokenSizeMax,
pattern: vaultGeneratedIamSecurityTokenPattern, pattern: iamSecurityTokenPattern,
}, },
// PublicId is used as the canonicalID for a request that contains // PublicId is used as the canonicalID for a request that contains
// no authentication information. Requestor can access // no authentication information. Requestor can access
@ -24,7 +23,6 @@ module.exports = {
zenkoServiceAccount: 'http://acs.zenko.io/accounts/service', zenkoServiceAccount: 'http://acs.zenko.io/accounts/service',
metadataFileNamespace: '/MDFile', metadataFileNamespace: '/MDFile',
    dataFileURL: '/DataFile',
-   passthroughFileURL: '/PassthroughFile',
    // AWS states max size for user-defined metadata
    // (x-amz-meta- headers) is 2 KB:
    // http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html
@@ -34,10 +32,7 @@ module.exports = {
    emptyFileMd5: 'd41d8cd98f00b204e9800998ecf8427e',
    // Version 2 changes the format of the data location property
    // Version 3 adds the dataStoreName attribute
-   // Version 4 add the Creation-Time and Content-Language attributes,
-   // and add support for x-ms-meta-* headers in UserMetadata
-   // Version 5 adds the azureInfo structure
-   mdModelVersion: 5,
+   mdModelVersion: 3,
    /*
     * Splitter is used to build the object name for the overview of a
     * multipart upload and to build the object names for each part of a
@@ -77,44 +72,9 @@ module.exports = {
    permittedCapitalizedBuckets: {
        METADATA: true,
    },
-   // Setting a lower object key limit to account for:
-   // - Mongo key limit of 1012 bytes
-   // - Version ID in Mongo Key if versioned of 33
-   // - Max bucket name length if bucket match false of 63
-   // - Extra prefix slash for bucket prefix if bucket match of 1
-   objectKeyByteLimit: 915,
-   /* delimiter for location-constraint. The location constraint will be able
-    * to include the ingestion flag
-    */
-   zenkoSeparator: ':',
    /* eslint-disable camelcase */
    externalBackends: { aws_s3: true, azure: true, gcp: true, pfs: true },
-   replicationBackends: { aws_s3: true, azure: true, gcp: true },
-   // hex digest of sha256 hash of empty string:
-   emptyStringHash: crypto.createHash('sha256')
-       .update('', 'binary').digest('hex'),
-   mpuMDStoredExternallyBackend: { aws_s3: true, gcp: true },
-   // AWS sets a minimum size limit for parts except for the last part.
-   // http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
-   minimumAllowedPartSize: 5242880,
-   gcpMaximumAllowedPartCount: 1024,
-   // GCP Object Tagging Prefix
-   gcpTaggingPrefix: 'aws-tag-',
-   productName: 'APN/1.0 Scality/1.0 Scality CloudServer for Zenko',
-   legacyLocations: ['sproxyd', 'legacy'],
-   // healthcheck default call from nginx is every 2 seconds
-   // for external backends, don't call unless at least 1 minute
-   // (60,000 milliseconds) since last call
-   externalBackendHealthCheckInterval: 60000,
-   // some of the available data backends (if called directly rather
-   // than through the multiple backend gateway) need a key provided
-   // as a string as first parameter of the get/delete methods.
-   clientsRequireStringKey: { sproxyd: true, cdmi: true },
-   hasCopyPartBackends: { aws_s3: true, gcp: true },
-   versioningNotImplBackends: { azure: true, gcp: true },
-   // user metadata applied on zenko-created objects
-   zenkoIDHeader: 'x-amz-meta-zenko-instance-id',
+   /* eslint-enable camelcase */
    // Default expiration value of the S3 pre-signed URL duration
    // 604800 seconds (seven days).
    defaultPreSignedURLExpiry: 7 * 24 * 60 * 60,
@@ -131,6 +91,10 @@ module.exports = {
        's3:ObjectRemoved:DeleteMarkerCreated',
    ]),
    notificationArnPrefix: 'arn:scality:bucketnotif',
+   // some of the available data backends (if called directly rather
+   // than through the multiple backend gateway) need a key provided
+   // as a string as first parameter of the get/delete methods.
+   clientsRequireStringKey: { sproxyd: true, cdmi: true },
    // HTTP server keep-alive timeout is set to a higher value than
    // client's free sockets timeout to avoid the risk of triggering
    // ECONNRESET errors if the server closes the connection at the
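As an aside, the removed `objectKeyByteLimit` constant encodes a simple byte budget; a quick sketch of the arithmetic behind the 915 value (variable names are ours, not from the source):

```javascript
// Mongo allows keys up to 1012 bytes; reserve room for the version ID,
// the longest possible bucket name, and the prefix slash.
const mongoKeyLimit = 1012;
const versionIdSuffix = 33;
const maxBucketNameLength = 63;
const prefixSlash = 1;
console.log(mongoKeyLimit - versionIdSuffix - maxBucketNameLength - prefixSlash);
// 915
```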
@@ -64,12 +64,12 @@ class IndexTransaction {
    push(op) {
        if (this.closed) {
            throw propError('pushOnCommittedTransaction',
                'can not add ops to already committed transaction');
        }
        if (op.type !== 'put' && op.type !== 'del') {
            throw propError('invalidTransactionVerb',
                `unknown action type: ${op.type}`);
        }
        if (op.key === undefined) {
@@ -137,7 +137,7 @@ class IndexTransaction {
    addCondition(condition) {
        if (this.closed) {
            throw propError('pushOnCommittedTransaction',
                'can not add conditions to already committed transaction');
        }
        if (condition === undefined || Object.keys(condition).length === 0) {
            throw propError('missingCondition', 'missing condition for conditional put');
@@ -159,12 +159,12 @@ class IndexTransaction {
    commit(cb) {
        if (this.closed) {
            return cb(propError('alreadyCommitted',
                'transaction was already committed'));
        }
        if (this.operations.length === 0) {
            return cb(propError('emptyTransaction',
                'tried to commit an empty transaction'));
        }
        this.closed = true;
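For reference, a minimal usage sketch of the transaction API touched here; the constructor argument is assumed, since it does not appear in this hunk:

```javascript
const transaction = new IndexTransaction(db); // `db` is hypothetical here

// push() validates the verb and required fields before queueing.
transaction.push({ type: 'put', key: 'foo', value: 'bar' });
transaction.push({ type: 'del', key: 'baz' });

// commit() rejects empty or already-committed transactions via propError.
transaction.commit(err => {
    if (err) {
        console.error('commit failed', err);
    }
});
```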
@@ -76,11 +76,11 @@ function errorsGen() {
    const errorsObj = require('../errors/arsenalErrors.json');
    Object.keys(errorsObj)
        .filter(index => index !== '_comment')
        .forEach(index => {
            errors[index] = new ArsenalError(index, errorsObj[index].code,
                errorsObj[index].description);
        });
    return errors;
}
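Assuming arsenalErrors.json defines the usual S3 error entries (e.g. `NoSuchKey`, which is a guess on our part), the generated map is consumed like this:

```javascript
const errors = errorsGen();

// Each entry is an ArsenalError carrying the HTTP code and description
// declared in arsenalErrors.json.
console.log(errors.NoSuchKey.code);        // e.g. 404
console.log(errors.NoSuchKey.description); // the JSON's description string
```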
@@ -7,8 +7,8 @@
        "test": "mocha --recursive --timeout 5500 tests/unit"
    },
    "dependencies": {
-       "mocha": "5.2.0",
-       "async": "~2.6.1",
+       "mocha": "2.5.3",
+       "async": "^2.6.0",
        "node-forge": "^0.7.1"
    }
}
@@ -17,9 +17,9 @@ describe('decyrptSecret', () => {
describe('parseServiceCredentials', () => {
    const conf = {
        users: [{ accessKey,
            accountType: 'service-clueso',
            secretKey,
            userName: 'Search Service Account' }],
    };
    const auth = JSON.stringify({ privateKey });
@@ -25,7 +25,7 @@ module.exports.once = function once(func) {
        state.res = func.apply(func, args);
    } else {
        debug('function already called:', func,
            'returning cached result:', state.res);
    }
    return state.res;
};
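A small usage sketch of this memoizing helper (the import path and wrapped function are invented):

```javascript
const { once } = require('./utils'); // hypothetical import of the module above

const connect = once(() => {
    console.log('connecting...');
    return { connected: true };
});

connect(); // runs the function, logs 'connecting...', caches the result
connect(); // logs the debug notice instead and returns the cached object
```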
@@ -17,33 +17,11 @@ class RedisClient {
                method: 'RedisClient.constructor',
                redisHost: config.host,
                redisPort: config.port,
-            }),
+            })
        );
        return this;
    }
-    /**
-     * scan a pattern and return matching keys
-     * @param {string} pattern - string pattern to match with all existing keys
-     * @param {number} [count=10] - scan count
-     * @param {callback} cb - callback (error, result)
-     * @return {undefined}
-     */
-    scan(pattern, count = 10, cb) {
-        const params = { match: pattern, count };
-        const keys = [];
-        const stream = this._client.scanStream(params);
-        stream.on('data', resultKeys => {
-            for (let i = 0; i < resultKeys.length; i++) {
-                keys.push(resultKeys[i]);
-            }
-        });
-        stream.on('end', () => {
-            cb(null, keys);
-        });
-    }
    /**
     * increment value of a key by 1 and set a ttl
     * @param {string} key - key holding the value
@@ -57,17 +35,6 @@ class RedisClient {
            .exec(cb);
    }
-    /**
-     * increment value of a key by a given amount
-     * @param {string} key - key holding the value
-     * @param {number} amount - amount to increase by
-     * @param {callback} cb - callback
-     * @return {undefined}
-     */
-    incrby(key, amount, cb) {
-        return this._client.incrby(key, amount, cb);
-    }
    /**
     * increment value of a key by a given amount and set a ttl
     * @param {string} key - key holding the value
@@ -83,24 +50,13 @@ class RedisClient {
    }
    /**
-     * decrement value of a key by a given amount
-     * @param {string} key - key holding the value
-     * @param {number} amount - amount to increase by
-     * @param {callback} cb - callback
-     * @return {undefined}
-     */
-    decrby(key, amount, cb) {
-        return this._client.decrby(key, amount, cb);
-    }
-    /**
-     * get value stored at key
-     * @param {string} key - key holding the value
-     * @param {callback} cb - callback
-     * @return {undefined}
-     */
-    get(key, cb) {
-        return this._client.get(key, cb);
+     * execute a batch of commands
+     * @param {string[]} cmds - list of commands
+     * @param {callback} cb - callback
+     * @return {undefined}
+     */
+    batch(cmds, cb) {
+        return this._client.pipeline(cmds).exec(cb);
    }
    /**
@@ -115,16 +71,6 @@ class RedisClient {
        return this._client.exists(key, cb);
    }
-    /**
-     * execute a batch of commands
-     * @param {string[]} cmds - list of commands
-     * @param {callback} cb - callback
-     * @return {undefined}
-     */
-    batch(cmds, cb) {
-        return this._client.pipeline(cmds).exec(cb);
-    }
    /**
     * Add a value and its score to a sorted set. If no sorted set exists, this
     * will create a new one for the given key.
@@ -204,26 +150,12 @@ class RedisClient {
        return this._client.zrangebyscore(key, min, max, cb);
    }
-    /**
-     * get TTL or expiration in seconds
-     * @param {string} key - name of key
-     * @param {function} cb - callback
-     * @return {undefined}
-     */
-    ttl(key, cb) {
-        return this._client.ttl(key, cb);
-    }
    clear(cb) {
        return this._client.flushdb(cb);
    }
-    disconnect(cb) {
-        return this._client.quit(cb);
-    }
-    listClients(cb) {
-        return this._client.client('list', cb);
+    disconnect() {
+        this._client.disconnect();
    }
}
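The removed scan() is a thin wrapper over ioredis's scanStream; a standalone sketch of the same pattern, assuming an ioredis client (connection settings invented):

```javascript
const Redis = require('ioredis');

const client = new Redis({ host: 'localhost', port: 6379 });

// Collect all keys matching a glob pattern via the SCAN cursor, then
// deliver them in one callback, as the removed method did.
function scan(pattern, count, cb) {
    const keys = [];
    const stream = client.scanStream({ match: pattern, count });
    stream.on('data', resultKeys => keys.push(...resultKeys));
    stream.on('error', err => cb(err));
    stream.on('end', () => cb(null, keys));
}

scan('service1:*', 10, (err, keys) => {
    if (!err) {
        console.log('matched keys:', keys);
    }
});
```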
@@ -41,11 +41,11 @@ class StatsClient {
    /**
     * build redis key to get total number of occurrences on the server
     * @param {string} name - key name identifier
-     * @param {Date} date - Date instance
+     * @param {object} d - Date instance
     * @return {string} key - key for redis
     */
-    buildKey(name, date) {
-        return `${name}:${this._normalizeTimestamp(date)}`;
+    _buildKey(name, d) {
+        return `${name}:${this._normalizeTimestamp(d)}`;
    }
    /**
@@ -85,35 +85,11 @@ class StatsClient {
            amount = (typeof incr === 'number') ? incr : 1;
        }
-        const key = this.buildKey(`${id}:requests`, new Date());
+        const key = this._buildKey(`${id}:requests`, new Date());
        return this._redis.incrbyEx(key, amount, this._expiry, callback);
    }
-    /**
-     * Increment the given key by the given value.
-     * @param {String} key - The Redis key to increment
-     * @param {Number} incr - The value to increment by
-     * @param {function} [cb] - callback
-     * @return {undefined}
-     */
-    incrementKey(key, incr, cb) {
-        const callback = cb || this._noop;
-        return this._redis.incrby(key, incr, callback);
-    }
-    /**
-     * Decrement the given key by the given value.
-     * @param {String} key - The Redis key to decrement
-     * @param {Number} decr - The value to decrement by
-     * @param {function} [cb] - callback
-     * @return {undefined}
-     */
-    decrementKey(key, decr, cb) {
-        const callback = cb || this._noop;
-        return this._redis.decrby(key, decr, callback);
-    }
    /**
     * report/record a request that ended up being a 500 on the server
     * @param {string} id - service identifier
@@ -125,54 +101,10 @@ class StatsClient {
            return undefined;
        }
        const callback = cb || this._noop;
-        const key = this.buildKey(`${id}:500s`, new Date());
+        const key = this._buildKey(`${id}:500s`, new Date());
        return this._redis.incrEx(key, this._expiry, callback);
    }
-    /**
-     * wrapper on `getStats` that handles a list of keys
-     * @param {object} log - Werelogs request logger
-     * @param {array} ids - service identifiers
-     * @param {callback} cb - callback to call with the err/result
-     * @return {undefined}
-     */
-    getAllStats(log, ids, cb) {
-        if (!this._redis) {
-            return cb(null, {});
-        }
-        const statsRes = {
-            'requests': 0,
-            '500s': 0,
-            'sampleDuration': this._expiry,
-        };
-        let requests = 0;
-        let errors = 0;
-        // for now set concurrency to default of 10
-        return async.eachLimit(ids, 10, (id, done) => {
-            this.getStats(log, id, (err, res) => {
-                if (err) {
-                    return done(err);
-                }
-                requests += res.requests;
-                errors += res['500s'];
-                return done();
-            });
-        }, error => {
-            if (error) {
-                log.error('error getting stats', {
-                    error,
-                    method: 'StatsClient.getAllStats',
-                });
-                return cb(null, statsRes);
-            }
-            statsRes.requests = requests;
-            statsRes['500s'] = errors;
-            return cb(null, statsRes);
-        });
-    }
    /**
     * get stats for the last x seconds, x being the sampling duration
     * @param {object} log - Werelogs request logger
@@ -189,8 +121,8 @@ class StatsClient {
        const reqsKeys = [];
        const req500sKeys = [];
        for (let i = 0; i < totalKeys; i++) {
-            reqsKeys.push(['get', this.buildKey(`${id}:requests`, d)]);
-            req500sKeys.push(['get', this.buildKey(`${id}:500s`, d)]);
+            reqsKeys.push(['get', this._buildKey(`${id}:requests`, d)]);
+            req500sKeys.push(['get', this._buildKey(`${id}:500s`, d)]);
            this._setPrevInterval(d);
        }
        return async.parallel([
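The rename from buildKey to _buildKey is cosmetic; the key scheme stays `<name>:<normalized timestamp>`. A sketch of what the key looks like, borrowing the minute-based normalization that StatsModel uses further down (the 5-minute interval is an assumed value):

```javascript
const intervalSecs = 300; // illustrative sampling interval

// Floor the timestamp onto an interval boundary so every request in the
// same window increments the same Redis key.
function normalizeTimestamp(d) {
    const m = d.getMinutes();
    return d.setMinutes(m - m % (intervalSecs / 60), 0, 0);
}

const buildKey = (name, d) => `${name}:${normalizeTimestamp(d)}`;

console.log(buildKey('service1:requests', new Date()));
// e.g. 'service1:requests:1700000100000'
```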
@@ -1,148 +1,11 @@
-const async = require('async');
const StatsClient = require('./StatsClient');

/**
 * @class StatsModel
 *
 * @classdesc Extend and overwrite how timestamps are normalized by minutes
 * rather than by seconds
 */
class StatsModel extends StatsClient {
-    /**
-     * Utility method to convert 2d array rows to columns, and vice versa
-     * See also: https://docs.ruby-lang.org/en/2.0.0/Array.html#method-i-zip
-     * @param {array} arrays - 2d array of integers
-     * @return {array} converted array
-     */
-    _zip(arrays) {
-        if (arrays.length > 0 && arrays.every(a => Array.isArray(a))) {
-            return arrays[0].map((_, i) => arrays.map(a => a[i]));
-        }
-        return [];
-    }
-    /**
-     * normalize to the nearest interval
-     * @param {object} d - Date instance
-     * @return {number} timestamp - normalized to the nearest interval
-     */
-    _normalizeTimestamp(d) {
-        const m = d.getMinutes();
-        return d.setMinutes(m - m % (Math.floor(this._interval / 60)), 0, 0);
-    }
-    /**
-     * override the method to get the count as an array of integers separated
-     * by each interval
-     * typical input looks like [[null, '1'], [null, '2'], [null, null]...]
-     * @param {array} arr - each index contains the result of each batch command
-     * where index 0 signifies the error and index 1 contains the result
-     * @return {array} array of integers, ordered from most recent interval to
-     * oldest interval with length of (expiry / interval)
-     */
-    _getCount(arr) {
-        const size = Math.floor(this._expiry / this._interval);
-        const array = arr.reduce((store, i) => {
-            let num = parseInt(i[1], 10);
-            num = Number.isNaN(num) ? 0 : num;
-            store.push(num);
-            return store;
-        }, []);
-        if (array.length < size) {
-            array.push(...Array(size - array.length).fill(0));
-        }
-        return array;
-    }
-    /**
-     * wrapper on `getStats` that handles a list of keys
-     * override the method to reduce the returned 2d array from `_getCount`
-     * @param {object} log - Werelogs request logger
-     * @param {array} ids - service identifiers
-     * @param {callback} cb - callback to call with the err/result
-     * @return {undefined}
-     */
-    getAllStats(log, ids, cb) {
-        if (!this._redis) {
-            return cb(null, {});
-        }
-        const size = Math.floor(this._expiry / this._interval);
-        const statsRes = {
-            'requests': Array(size).fill(0),
-            '500s': Array(size).fill(0),
-            'sampleDuration': this._expiry,
-        };
-        const requests = [];
-        const errors = [];
-        if (ids.length === 0) {
-            return cb(null, statsRes);
-        }
-        // for now set concurrency to default of 10
-        return async.eachLimit(ids, 10, (id, done) => {
-            this.getStats(log, id, (err, res) => {
-                if (err) {
-                    return done(err);
-                }
-                requests.push(res.requests);
-                errors.push(res['500s']);
-                return done();
-            });
-        }, error => {
-            if (error) {
-                log.error('error getting stats', {
-                    error,
-                    method: 'StatsModel.getAllStats',
-                });
-                return cb(null, statsRes);
-            }
-            statsRes.requests = this._zip(requests).map(arr =>
-                arr.reduce((acc, i) => acc + i), 0);
-            statsRes['500s'] = this._zip(errors).map(arr =>
-                arr.reduce((acc, i) => acc + i), 0);
-            return cb(null, statsRes);
-        });
-    }
-    /**
-     * Handles getting a list of global keys.
-     * @param {array} ids - Service identifiers
-     * @param {object} log - Werelogs request logger
-     * @param {function} cb - Callback
-     * @return {undefined}
-     */
-    getAllGlobalStats(ids, log, cb) {
-        const reqsKeys = ids.map(key => (['get', key]));
-        return this._redis.batch(reqsKeys, (err, res) => {
-            const statsRes = { requests: 0 };
-            if (err) {
-                log.error('error getting metrics', {
-                    error: err,
-                    method: 'StatsClient.getAllGlobalStats',
-                });
-                return cb(null, statsRes);
-            }
-            statsRes.requests = res.reduce((sum, curr) => {
-                const [cmdErr, val] = curr;
-                if (cmdErr) {
-                    // Log any individual request errors from the batch request.
-                    log.error('error getting metrics', {
-                        error: cmdErr,
-                        method: 'StatsClient.getAllGlobalStats',
-                    });
-                }
-                return sum + (Number.parseInt(val, 10) || 0);
-            }, 0);
-            return cb(null, statsRes);
-        });
-    }
    /**
     * normalize date timestamp to the nearest hour
     * @param {Date} d - Date instance
@@ -161,6 +24,34 @@ class StatsModel extends StatsClient {
        return d.setHours(d.getHours() - 1);
    }
+    /**
+     * normalize to the nearest interval
+     * @param {object} d - Date instance
+     * @return {number} timestamp - normalized to the nearest interval
+     */
+    _normalizeTimestamp(d) {
+        const m = d.getMinutes();
+        return d.setMinutes(m - m % (Math.floor(this._interval / 60)), 0, 0);
+    }
+    /**
+     * override the method to get the result as an array of integers separated
+     * by each interval
+     * typical input looks like [[null, '1'], [null, '2'], [null, null]...]
+     * @param {array} arr - each index contains the result of each batch command
+     * where index 0 signifies the error and index 1 contains the result
+     * @return {array} array of integers, ordered from most recent interval to
+     * oldest interval
+     */
+    _getCount(arr) {
+        return arr.reduce((store, i) => {
+            let num = parseInt(i[1], 10);
+            num = Number.isNaN(num) ? 0 : num;
+            store.push(num);
+            return store;
+        }, []);
+    }
    /**
     * get list of sorted set key timestamps
     * @param {number} epoch - epoch time
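To make the removed helpers concrete: _zip transposes rows to columns, and _getCount coerces raw Redis batch replies into integers. A tiny illustration with invented inputs:

```javascript
// _zip([[1, 2, 3], [4, 5, 6]]) -> [[1, 4], [2, 5], [3, 6]]
const zip = arrays => arrays[0].map((_, i) => arrays.map(a => a[i]));
console.log(zip([[1, 2, 3], [4, 5, 6]]));

// _getCount maps batch replies ([err, value] pairs) to counts, treating
// errors and missing intervals as 0.
const batchReplies = [[null, '1'], [null, '2'], [null, null]];
const counts = batchReplies.map(([, v]) => Number.parseInt(v, 10) || 0);
console.log(counts); // [1, 2, 0]
```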
@@ -2,8 +2,8 @@ const promClient = require('prom-client');
const collectDefaultMetricsIntervalMs =
    process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS !== undefined ?
        Number.parseInt(process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS, 10) :
        10000;
promClient.collectDefaultMetrics({ timeout: collectDefaultMetricsIntervalMs });
@@ -27,7 +27,7 @@ class ARN {
    static createFromString(arnStr) {
        const [arn, partition, service, region, accountId,
            resourceType, resource] = arnStr.split(':');
        if (arn !== 'arn') {
            return { error: errors.InvalidArgument.customizeDescription(
@@ -58,7 +58,7 @@ class ARN {
                'must be a 12-digit number or "*"') };
        }
        const fullResource = (resource !== undefined ?
            `${resourceType}:${resource}` : resourceType);
        return new ARN(partition, service, region, accountId, fullResource);
    }
@@ -98,7 +98,7 @@ class ARN {
    toString() {
        return ['arn', this.getPartition(), this.getService(),
            this.getRegion(), this.getAccountId(), this.getResource()]
            .join(':');
    }
}
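A round-trip sketch of the parsing shown above (the example ARN is invented):

```javascript
const arn = ARN.createFromString('arn:aws:iam::123456789012:role/lifecycle');

// The colon-separated fields map onto partition/service/region/account,
// with everything after the fifth colon kept together as the resource.
console.log(arn.getService());   // 'iam'
console.log(arn.getAccountId()); // '123456789012'
console.log(arn.getResource());  // 'role/lifecycle'
console.log(arn.toString());     // 'arn:aws:iam::123456789012:role/lifecycle'
```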
@@ -1,237 +0,0 @@
/**
* Helper class to ease access to the Azure specific information for
* storage accounts mapped to buckets.
*/
class BucketAzureInfo {
/**
* @constructor
* @param {object} obj - Raw structure for the Azure info on storage account
* @param {string} obj.sku - SKU name of this storage account
* @param {string} obj.accessTier - Access Tier name of this storage account
* @param {string} obj.kind - Kind name of this storage account
* @param {string[]} obj.systemKeys - pair of shared keys for the system
* @param {string[]} obj.tenantKeys - pair of shared keys for the tenant
* @param {string} obj.subscriptionId - subscription ID the storage account
* belongs to
* @param {string} obj.resourceGroup - Resource group name the storage
* account belongs to
* @param {object} obj.deleteRetentionPolicy - Delete retention policy
* @param {boolean} obj.deleteRetentionPolicy.enabled -
* @param {number} obj.deleteRetentionPolicy.days -
* @param {object[]} obj.managementPolicies - Management policies for this
* storage account
* @param {boolean} obj.httpsOnly - Serve the content of this storage
* account through HTTPS only
* @param {object} obj.tags - Set of tags applied on this storage account
* @param {object[]} obj.networkACL - Network ACL of this storage account
* @param {string} obj.cname - CNAME of this storage account
* @param {boolean} obj.azureFilesAADIntegration - whether or not Azure
* Files AAD Integration is enabled for this storage account
* @param {boolean} obj.hnsEnabled - whether or not a hierarchical namespace
* is enabled for this storage account
* @param {object} obj.logging - service properties: logging
* @param {object} obj.hourMetrics - service properties: hourMetrics
* @param {object} obj.minuteMetrics - service properties: minuteMetrics
* @param {string} obj.serviceVersion - service properties: serviceVersion
*/
constructor(obj) {
this._data = {
sku: obj.sku,
accessTier: obj.accessTier,
kind: obj.kind,
systemKeys: obj.systemKeys,
tenantKeys: obj.tenantKeys,
subscriptionId: obj.subscriptionId,
resourceGroup: obj.resourceGroup,
deleteRetentionPolicy: obj.deleteRetentionPolicy,
managementPolicies: obj.managementPolicies,
httpsOnly: obj.httpsOnly,
tags: obj.tags,
networkACL: obj.networkACL,
cname: obj.cname,
azureFilesAADIntegration: obj.azureFilesAADIntegration,
hnsEnabled: obj.hnsEnabled,
logging: obj.logging,
hourMetrics: obj.hourMetrics,
minuteMetrics: obj.minuteMetrics,
serviceVersion: obj.serviceVersion,
};
}
getSku() {
return this._data.sku;
}
setSku(sku) {
this._data.sku = sku;
return this;
}
getAccessTier() {
return this._data.accessTier;
}
setAccessTier(accessTier) {
this._data.accessTier = accessTier;
return this;
}
getKind() {
return this._data.kind;
}
setKind(kind) {
this._data.kind = kind;
return this;
}
getSystemKeys() {
return this._data.systemKeys;
}
setSystemKeys(systemKeys) {
this._data.systemKeys = systemKeys;
return this;
}
getTenantKeys() {
return this._data.tenantKeys;
}
setTenantKeys(tenantKeys) {
this._data.tenantKeys = tenantKeys;
return this;
}
getSubscriptionId() {
return this._data.subscriptionId;
}
setSubscriptionId(subscriptionId) {
this._data.subscriptionId = subscriptionId;
return this;
}
getResourceGroup() {
return this._data.resourceGroup;
}
setResourceGroup(resourceGroup) {
this._data.resourceGroup = resourceGroup;
return this;
}
getDeleteRetentionPolicy() {
return this._data.deleteRetentionPolicy;
}
setDeleteRetentionPolicy(deleteRetentionPolicy) {
this._data.deleteRetentionPolicy = deleteRetentionPolicy;
return this;
}
getManagementPolicies() {
return this._data.managementPolicies;
}
setManagementPolicies(managementPolicies) {
this._data.managementPolicies = managementPolicies;
return this;
}
getHttpsOnly() {
return this._data.httpsOnly;
}
setHttpsOnly(httpsOnly) {
this._data.httpsOnly = httpsOnly;
return this;
}
getTags() {
return this._data.tags;
}
setTags(tags) {
this._data.tags = tags;
return this;
}
getNetworkACL() {
return this._data.networkACL;
}
setNetworkACL(networkACL) {
this._data.networkACL = networkACL;
return this;
}
getCname() {
return this._data.cname;
}
setCname(cname) {
this._data.cname = cname;
return this;
}
getAzureFilesAADIntegration() {
return this._data.azureFilesAADIntegration;
}
setAzureFilesAADIntegration(azureFilesAADIntegration) {
this._data.azureFilesAADIntegration = azureFilesAADIntegration;
return this;
}
getHnsEnabled() {
return this._data.hnsEnabled;
}
setHnsEnabled(hnsEnabled) {
this._data.hnsEnabled = hnsEnabled;
return this;
}
getLogging() {
return this._data.logging;
}
setLogging(logging) {
this._data.logging = logging;
return this;
}
getHourMetrics() {
return this._data.hourMetrics;
}
setHourMetrics(hourMetrics) {
this._data.hourMetrics = hourMetrics;
return this;
}
getMinuteMetrics() {
return this._data.minuteMetrics;
}
setMinuteMetrics(minuteMetrics) {
this._data.minuteMetrics = minuteMetrics;
return this;
}
getServiceVersion() {
return this._data.serviceVersion;
}
setServiceVersion(serviceVersion) {
this._data.serviceVersion = serviceVersion;
return this;
}
getValue() {
return this._data;
}
}
module.exports = BucketAzureInfo;
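Since the class is removed wholesale here, a short sketch of how its fluent interface was meant to be used (field values invented):

```javascript
const azureInfo = new BucketAzureInfo({
    sku: 'Standard_LRS', // illustrative values only
    accessTier: 'Hot',
    kind: 'StorageV2',
    httpsOnly: true,
    // ...remaining fields from the constructor doc above
});

// Every setter returns `this`, so updates chain naturally.
azureInfo.setAccessTier('Cool').setHttpsOnly(false);
console.log(azureInfo.getValue().accessTier); // 'Cool'
```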
@@ -9,9 +9,8 @@ const BucketPolicy = require('./BucketPolicy');
const NotificationConfiguration = require('./NotificationConfiguration');
// WHEN UPDATING THIS NUMBER, UPDATE BucketInfoModelVersion.md CHANGELOG
-// BucketInfoModelVersion.md can be found in documentation/ at the root
-// of this repository
-const modelVersion = 14;
+// BucketInfoModelVersion.md can be found in the root of this repository
+const modelVersion = 10;
class BucketInfo {
    /**
@@ -42,8 +41,7 @@ class BucketInfo {
     * @param {object} versioningConfiguration - versioning configuration
     * @param {string} versioningConfiguration.Status - versioning status
     * @param {object} versioningConfiguration.MfaDelete - versioning mfa delete
-     * @param {string} locationConstraint - locationConstraint for bucket that
-     * also includes the ingestion flag
+     * @param {string} locationConstraint - locationConstraint for bucket
     * @param {WebsiteConfiguration} [websiteConfiguration] - website
     * configuration
     * @param {object[]} [cors] - collection of CORS rules to apply
@@ -59,23 +57,17 @@ class BucketInfo {
     * @param {object} [lifecycleConfiguration] - lifecycle configuration
     * @param {object} [bucketPolicy] - bucket policy
     * @param {string} [uid] - unique identifier for the bucket, necessary
-     * @param {string} readLocationConstraint - readLocationConstraint for bucket
     * addition for use with lifecycle operations
-     * @param {boolean} [isNFS] - whether the bucket is on NFS
-     * @param {object} [ingestionConfig] - object for ingestion status: en/dis
-     * @param {object} [azureInfo] - Azure storage account specific info
     * @param {boolean} [objectLockEnabled] - true when object lock enabled
     * @param {object} [objectLockConfiguration] - object lock configuration
     * @param {object} [notificationConfiguration] - bucket notification configuration
     */
    constructor(name, owner, ownerDisplayName, creationDate,
        mdBucketModelVersion, acl, transient, deleted,
        serverSideEncryption, versioningConfiguration,
        locationConstraint, websiteConfiguration, cors,
        replicationConfiguration, lifecycleConfiguration,
-        bucketPolicy, uid, readLocationConstraint, isNFS,
-        ingestionConfig, azureInfo, objectLockEnabled,
-        objectLockConfiguration, notificationConfiguration) {
+        bucketPolicy, uid, objectLockEnabled, objectLockConfiguration,
+        notificationConfiguration) {
        assert.strictEqual(typeof name, 'string');
        assert.strictEqual(typeof owner, 'string');
        assert.strictEqual(typeof ownerDisplayName, 'string');
@@ -94,7 +86,7 @@ class BucketInfo {
        if (serverSideEncryption) {
            assert.strictEqual(typeof serverSideEncryption, 'object');
            const { cryptoScheme, algorithm, masterKeyId,
                configuredMasterKeyId, mandatory } = serverSideEncryption;
            assert.strictEqual(typeof cryptoScheme, 'number');
            assert.strictEqual(typeof algorithm, 'string');
            assert.strictEqual(typeof masterKeyId, 'string');
@@ -116,15 +108,6 @@ class BucketInfo {
        if (locationConstraint) {
            assert.strictEqual(typeof locationConstraint, 'string');
        }
-        if (ingestionConfig) {
-            assert.strictEqual(typeof ingestionConfig, 'object');
-        }
-        if (azureInfo) {
-            assert.strictEqual(typeof azureInfo, 'object');
-        }
-        if (readLocationConstraint) {
-            assert.strictEqual(typeof readLocationConstraint, 'string');
-        }
        if (websiteConfiguration) {
            assert(websiteConfiguration instanceof WebsiteConfiguration);
            const { indexDocument, errorDocument, redirectAllRequestsTo,
@@ -181,16 +164,12 @@ class BucketInfo {
        this._serverSideEncryption = serverSideEncryption || null;
        this._versioningConfiguration = versioningConfiguration || null;
        this._locationConstraint = locationConstraint || null;
-        this._readLocationConstraint = readLocationConstraint || null;
        this._websiteConfiguration = websiteConfiguration || null;
        this._replicationConfiguration = replicationConfiguration || null;
        this._cors = cors || null;
        this._lifecycleConfiguration = lifecycleConfiguration || null;
        this._bucketPolicy = bucketPolicy || null;
        this._uid = uid || uuid();
-        this._isNFS = isNFS || null;
-        this._ingestion = ingestionConfig || null;
-        this._azureInfo = azureInfo || null;
        this._objectLockEnabled = objectLockEnabled || false;
        this._objectLockConfiguration = objectLockConfiguration || null;
        this._notificationConfiguration = notificationConfiguration || null;
@@ -213,16 +192,12 @@ class BucketInfo {
            serverSideEncryption: this._serverSideEncryption,
            versioningConfiguration: this._versioningConfiguration,
            locationConstraint: this._locationConstraint,
-            readLocationConstraint: this._readLocationConstraint,
            websiteConfiguration: undefined,
            cors: this._cors,
            replicationConfiguration: this._replicationConfiguration,
            lifecycleConfiguration: this._lifecycleConfiguration,
            bucketPolicy: this._bucketPolicy,
            uid: this._uid,
-            isNFS: this._isNFS,
-            ingestion: this._ingestion,
-            azureInfo: this._azureInfo,
            objectLockEnabled: this._objectLockEnabled,
            objectLockConfiguration: this._objectLockConfiguration,
            notificationConfiguration: this._notificationConfiguration,
@@ -247,8 +222,7 @@ class BucketInfo {
            obj.transient, obj.deleted, obj.serverSideEncryption,
            obj.versioningConfiguration, obj.locationConstraint, websiteConfig,
            obj.cors, obj.replicationConfiguration, obj.lifecycleConfiguration,
-            obj.bucketPolicy, obj.uid, obj.readLocationConstraint, obj.isNFS,
-            obj.ingestion, obj.azureInfo, obj.objectLockEnabled,
+            obj.bucketPolicy, obj.uid, obj.objectLockEnabled,
            obj.objectLockConfiguration, obj.notificationConfiguration);
    }
@@ -273,10 +247,8 @@ class BucketInfo {
            data._versioningConfiguration, data._locationConstraint,
            data._websiteConfiguration, data._cors,
            data._replicationConfiguration, data._lifecycleConfiguration,
-            data._bucketPolicy, data._uid, data._readLocationConstraint,
-            data._isNFS, data._ingestion, data._azureInfo,
-            data._objectLockEnabled, data._objectLockConfiguration,
-            data._notificationConfiguration);
+            data._bucketPolicy, data._uid, data._objectLockEnabled,
+            data._objectLockConfiguration, data._notificationConfiguration);
    }
    /**
@@ -573,17 +545,6 @@ class BucketInfo {
        return this._locationConstraint;
    }
-    /**
-     * Get read location constraint.
-     * @return {string} - bucket read location constraint
-     */
-    getReadLocationConstraint() {
-        if (this._readLocationConstraint) {
-            return this._readLocationConstraint;
-        }
-        return this._locationConstraint;
-    }
    /**
     * Set Bucket model version
     *
@@ -672,85 +633,6 @@ class BucketInfo {
        this._uid = uid;
        return this;
    }
/**
* Check if the bucket is an NFS bucket.
* @return {boolean} - Whether the bucket is NFS or not
*/
isNFS() {
return this._isNFS;
}
/**
* Set whether the bucket is an NFS bucket.
* @param {boolean} isNFS - Whether the bucket is NFS or not
* @return {BucketInfo} - bucket info instance
*/
setIsNFS(isNFS) {
this._isNFS = isNFS;
return this;
}
/**
* enable ingestion, set 'this._ingestion' to { status: 'enabled' }
* @return {BucketInfo} - bucket info instance
*/
enableIngestion() {
this._ingestion = { status: 'enabled' };
return this;
}
/**
* disable ingestion, set 'this._ingestion' to { status: 'disabled' }
* @return {BucketInfo} - bucket info instance
*/
disableIngestion() {
this._ingestion = { status: 'disabled' };
return this;
}
/**
* Get ingestion configuration
* @return {object} - bucket ingestion configuration: Enabled or Disabled
*/
getIngestion() {
return this._ingestion;
}
/**
* Check if bucket is an ingestion bucket
* @return {boolean} - 'true' if bucket is ingestion bucket, 'false' if
* otherwise
*/
isIngestionBucket() {
const ingestionConfig = this.getIngestion();
if (ingestionConfig) {
return true;
}
return false;
}
/**
* Check if ingestion is enabled
* @return {boolean} - 'true' if ingestion is enabled, otherwise 'false'
*/
isIngestionEnabled() {
const ingestionConfig = this.getIngestion();
return ingestionConfig ? ingestionConfig.status === 'enabled' : false;
}
/**
* Return the Azure specific storage account information for this bucket
* @return {object} - a structure suitable for {@link BucketAzureInfo}
* constructor
*/
getAzureInfo() {
return this._azureInfo;
}
/**
* Set the Azure specific storage account information for this bucket
* @param {object} azureInfo - a structure suitable for
* {@link BucketAzureInfo} construction
* @return {BucketInfo} - bucket info instance
*/
setAzureInfo(azureInfo) {
this._azureInfo = azureInfo;
return this;
}
    /**
     * Check if object lock is enabled.
     * @return {boolean} - depending on whether object lock is enabled
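The serialization round trip these hunks adjust, sketched at the shape level (values are invented and the real constructor asserts more fields than shown here):

```javascript
const bucket = new BucketInfo('my-bucket', 'ownerCanonicalId',
    'Owner Display Name', new Date().toJSON(), 10, {
        Canned: 'private',
        FULL_CONTROL: [],
        WRITE: [],
        WRITE_ACP: [],
        READ: [],
        READ_ACP: [],
    });

// serialize() embeds every persisted field, including modelVersion-gated
// ones such as objectLockEnabled; deSerialize() rebuilds the instance.
const copy = BucketInfo.deSerialize(bucket.serialize());
console.log(copy.getName()); // 'my-bucket'
```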
@@ -5,8 +5,6 @@ const errors = require('../errors');
const LifecycleRule = require('./LifecycleRule');
const escapeForXml = require('../s3middleware/escapeForXml');
-const MAX_DAYS = 2147483647; // Max 32-bit signed binary integer.
/**
 * Format of xml request:
@@ -87,13 +85,10 @@ class LifecycleConfiguration {
    /**
     * Create a Lifecycle Configuration instance
     * @param {string} xml - the parsed xml
-     * @param {object} config - the CloudServer config
     * @return {object} - LifecycleConfiguration instance
     */
-    constructor(xml, config) {
+    constructor(xml) {
        this._parsedXML = xml;
-        this._storageClasses =
-            config.replicationEndpoints.map(endpoint => endpoint.site);
        this._ruleIDs = [];
        this._tagKeys = [];
        this._config = {};
@@ -216,10 +211,9 @@
     */
    _parseRule(rule) {
        const ruleObj = {};
-        if (rule.NoncurrentVersionTransition) {
+        if (rule.Transition || rule.NoncurrentVersionTransition) {
            ruleObj.error = errors.NotImplemented.customizeDescription(
-                'NoncurrentVersionTransition lifecycle action not yet ' +
-                'implemented');
+                'Transition lifecycle action not yet implemented');
            return ruleObj;
        }
        // Either Prefix or Filter must be included, but can be empty string
@@ -381,7 +375,7 @@
            if (!tags[i].Key || !tags[i].Value) {
                tagObj.error =
                    errors.MissingRequiredParameter.customizeDescription(
                        'Tag XML does not contain both Key and Value');
                break;
            }
@@ -474,315 +468,6 @@
        return statusObj;
    }
/**
* Finds the prefix and/or tags of the given rule and gets the error message
* @param {object} rule - The rule to find the prefix in
* @return {string} - The prefix of filter information
*/
_getRuleFilterDesc(rule) {
if (rule.Prefix) {
return `prefix '${rule.Prefix[0]}'`;
}
// There must be a filter if no top-level prefix is provided. First
// check if there are multiple filters (i.e. `Filter.And`).
if (rule.Filter[0] === undefined || rule.Filter[0].And === undefined) {
const { Prefix, Tag } = rule.Filter[0] || {};
if (Prefix) {
return `filter '(prefix=${Prefix[0]})'`;
}
if (Tag) {
const { Key, Value } = Tag[0];
return `filter '(tag: key=${Key[0]}, value=${Value[0]})'`;
}
return 'filter (all)';
}
const filters = [];
const { Prefix, Tag } = rule.Filter[0].And[0];
if (Prefix) {
filters.push(`prefix=${Prefix[0]}`);
}
Tag.forEach(tag => {
const { Key, Value } = tag;
filters.push(`tag: key=${Key[0]}, value=${Value[0]}`);
});
const joinedFilters = filters.join(' and ');
return `filter '(${joinedFilters})'`;
}
/**
* Checks the validity of the given field
* @param {object} params - Given function parameters
* @param {string} params.days - The value of the field to check
* @param {string} params.field - The field name with the value
* @param {string} params.ancestor - The immediate ancestor field
* @return {object|null} Returns an error object or `null`
*/
_checkDays(params) {
const { days, field, ancestor } = params;
if (days < 0) {
const msg = `'${field}' in ${ancestor} action must be nonnegative`;
return errors.InvalidArgument.customizeDescription(msg);
}
if (days > MAX_DAYS) {
return errors.MalformedXML.customizeDescription(
`'${field}' in ${ancestor} action must not exceed ${MAX_DAYS}`);
}
return null;
}
/**
* Checks the validity of the given storage class
* @param {object} params - Given function parameters
* @param {array} params.usedStorageClasses - Storage classes used in other
* rules
* @param {string} params.storageClass - The storage class of the current
* rule
* @param {string} params.ancestor - The immediate ancestor field
* @param {string} params.prefix - The prefix of the rule
* @return {object|null} Returns an error object or `null`
*/
_checkStorageClasses(params) {
const { usedStorageClasses, storageClass, ancestor, rule } = params;
if (!this._storageClasses.includes(storageClass)) {
// This differs from the AWS message. This will help the user since
// the StorageClass does not conform to AWS specs.
const list = `'${this._storageClasses.join("', '")}'`;
const msg = `'StorageClass' must be one of ${list}`;
return errors.MalformedXML.customizeDescription(msg);
}
if (usedStorageClasses.includes(storageClass)) {
const msg = `'StorageClass' must be different for '${ancestor}' ` +
`actions in same 'Rule' with ${this._getRuleFilterDesc(rule)}`;
return errors.InvalidRequest.customizeDescription(msg);
}
return null;
}
/**
* Ensure that transition rules are at least a day apart from each other.
* @param {object} params - Given function parameters
* @param {string} [params.days] - The days of the current transition
* @param {string} [params.date] - The date of the current transition
* @param {string} params.storageClass - The storage class of the current
* rule
* @param {string} params.rule - The current rule
* @return {undefined}
*/
_checkTimeGap(params) {
const { days, date, storageClass, rule } = params;
const invalidTransition = rule.Transition.find(transition => {
if (storageClass === transition.StorageClass[0]) {
return false;
}
if (days !== undefined) {
return Number.parseInt(transition.Days[0], 10) === days;
}
if (date !== undefined) {
const timestamp = new Date(date).getTime();
const compareTimestamp = new Date(transition.Date[0]).getTime();
const oneDay = 24 * 60 * 60 * 1000; // Milliseconds in a day.
return Math.abs(timestamp - compareTimestamp) < oneDay;
}
return false;
});
if (invalidTransition) {
const timeType = days !== undefined ? 'Days' : 'Date';
const filterMsg = this._getRuleFilterDesc(rule);
const compareStorageClass = invalidTransition.StorageClass[0];
const msg = `'${timeType}' in the 'Transition' action for ` +
`StorageClass '${storageClass}' for ${filterMsg} must be at ` +
`least one day apart from ${filterMsg} in the 'Transition' ` +
`action for StorageClass '${compareStorageClass}'`;
return errors.InvalidArgument.customizeDescription(msg);
}
return undefined;
}
/**
* Checks transition time type (i.e. 'Date' or 'Days') only occurs once
* across transitions and across transitions and expiration policies
* @param {object} params - Given function parameters
* @param {string} params.usedTimeType - The time type that has been used by
* another rule
* @param {string} params.currentTimeType - the time type used by the
* current rule
* @param {string} params.rule - The current rule
* @return {object|null} Returns an error object or `null`
*/
_checkTimeType(params) {
const { usedTimeType, currentTimeType, rule } = params;
if (usedTimeType && usedTimeType !== currentTimeType) {
const msg = "Found mixed 'Date' and 'Days' based Transition " +
'actions in lifecycle rule for ' +
`${this._getRuleFilterDesc(rule)}`;
return errors.InvalidRequest.customizeDescription(msg);
}
// Transition time type cannot differ from the expiration, if provided.
if (rule.Expiration &&
rule.Expiration[0][currentTimeType] === undefined) {
const msg = "Found mixed 'Date' and 'Days' based Expiration and " +
'Transition actions in lifecycle rule for ' +
`${this._getRuleFilterDesc(rule)}`;
return errors.InvalidRequest.customizeDescription(msg);
}
return null;
}
/**
* Checks the validity of the given date
* @param {string} date - The date to check
* @return {object|null} Returns an error object or `null`
*/
_checkDate(date) {
const isoRegex = new RegExp('^(-?(?:[1-9][0-9]*)?[0-9]{4})-' +
'(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9])' +
':([0-5][0-9]):([0-5][0-9])(.[0-9]+)?(Z)?$');
if (!isoRegex.test(date)) {
const msg = 'Date must be in ISO 8601 format';
return errors.InvalidArgument.customizeDescription(msg);
}
return null;
}
/**
* Parses the NonCurrentVersionTransition value
* @param {object} rule - Rule object from Rule array from this._parsedXml
* @return {object} - Contains error if parsing failed, otherwise contains
* the parsed nonCurrentVersionTransition array
*
* Format of result:
* result = {
* error: <error>,
* nonCurrentVersionTransition: [
* {
* noncurrentDays: <non-current-days>,
* storageClass: <storage-class>,
* },
* ...
* ]
* }
*/
_parseNoncurrentVersionTransition(rule) {
const nonCurrentVersionTransition = [];
const usedStorageClasses = [];
for (let i = 0; i < rule.NoncurrentVersionTransition.length; i++) {
const t = rule.NoncurrentVersionTransition[i]; // Transition object
const noncurrentDays =
t.NoncurrentDays && Number.parseInt(t.NoncurrentDays[0], 10);
const storageClass = t.StorageClass && t.StorageClass[0];
if (noncurrentDays === undefined || storageClass === undefined) {
return { error: errors.MalformedXML };
}
let error = this._checkDays({
days: noncurrentDays,
field: 'NoncurrentDays',
ancestor: 'NoncurrentVersionTransition',
});
if (error) {
return { error };
}
error = this._checkStorageClasses({
storageClass,
usedStorageClasses,
ancestor: 'NoncurrentVersionTransition',
rule,
});
if (error) {
return { error };
}
nonCurrentVersionTransition.push({ noncurrentDays, storageClass });
usedStorageClasses.push(storageClass);
}
return { nonCurrentVersionTransition };
}
/**
* Parses the Transition value
* @param {object} rule - Rule object from Rule array from this._parsedXml
* @return {object} - Contains error if parsing failed, otherwise contains
* the parsed transition array
*
* Format of result:
* result = {
* error: <error>,
* transition: [
* {
* days: <days>,
* date: <date>,
* storageClass: <storage-class>,
* },
* ...
* ]
* }
*/
_parseTransition(rule) {
const transition = [];
const usedStorageClasses = [];
let usedTimeType = null;
for (let i = 0; i < rule.Transition.length; i++) {
const t = rule.Transition[i]; // Transition object
const days = t.Days && Number.parseInt(t.Days[0], 10);
const date = t.Date && t.Date[0];
const storageClass = t.StorageClass && t.StorageClass[0];
if ((days === undefined && date === undefined) ||
(days !== undefined && date !== undefined) ||
(storageClass === undefined)) {
return { error: errors.MalformedXML };
}
let error = this._checkStorageClasses({
storageClass,
usedStorageClasses,
ancestor: 'Transition',
rule,
});
if (error) {
return { error };
}
usedStorageClasses.push(storageClass);
if (days !== undefined) {
error = this._checkTimeType({
usedTimeType,
currentTimeType: 'Days',
rule,
});
if (error) {
return { error };
}
usedTimeType = 'Days';
error = this._checkDays({
days,
field: 'Days',
ancestor: 'Transition',
});
if (error) {
return { error };
}
transition.push({ days, storageClass });
}
if (date !== undefined) {
error = this._checkTimeType({
usedTimeType,
currentTimeType: 'Date',
rule,
});
if (error) {
return { error };
}
usedTimeType = 'Date';
error = this._checkDate(date);
if (error) {
return { error };
}
transition.push({ date, storageClass });
}
error = this._checkTimeGap({ days, date, storageClass, rule });
if (error) {
return { error };
}
}
return { transition };
}
    /**
     * Check that action component of rule is valid
     * @param {object} rule - a rule object from Rule array from this._parsedXml
@@ -807,13 +492,8 @@
        const actionsObj = {};
        actionsObj.propName = 'actions';
        actionsObj.actions = [];
-        const validActions = [
-            'AbortIncompleteMultipartUpload',
-            'Expiration',
-            'NoncurrentVersionExpiration',
-            'NoncurrentVersionTransition',
-            'Transition',
-        ];
+        const validActions = ['AbortIncompleteMultipartUpload',
+            'Expiration', 'NoncurrentVersionExpiration'];
        validActions.forEach(a => {
            if (rule[a]) {
                actionsObj.actions.push({ actionName: `${a}` });
@@ -830,8 +510,7 @@
            if (action.error) {
                actionsObj.error = action.error;
            } else {
-                const actionTimes = ['days', 'date', 'deleteMarker',
-                    'transition', 'nonCurrentVersionTransition'];
+                const actionTimes = ['days', 'date', 'deleteMarker'];
                actionTimes.forEach(t => {
                    if (action[t]) {
                        // eslint-disable-next-line no-param-reassign
@@ -918,9 +597,12 @@
            return expObj;
        }
        if (subExp.Date) {
-            const error = this._checkDate(subExp.Date[0]);
-            if (error) {
-                expObj.error = error;
+            const isoRegex = new RegExp('^(-?(?:[1-9][0-9]*)?[0-9]{4})-' +
+                '(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9])' +
+                ':([0-5][0-9]):([0-5][0-9])(.[0-9]+)?(Z)?$');
+            if (!isoRegex.test(subExp.Date[0])) {
+                expObj.error = errors.InvalidArgument.customizeDescription(
+                    'Date must be in ISO 8601 format');
            } else {
                expObj.date = subExp.Date[0];
            }
@@ -929,7 +611,7 @@
            const daysInt = parseInt(subExp.Days[0], 10);
            if (daysInt < 1) {
                expObj.error = errors.InvalidArgument.customizeDescription(
                    'Expiration days is not a positive integer');
            } else {
                expObj.days = daysInt;
            }
@@ -1032,26 +714,6 @@
            if (a.deleteMarker) {
                assert.strictEqual(typeof a.deleteMarker, 'string');
            }
-            if (a.nonCurrentVersionTransition) {
-                assert.strictEqual(
-                    typeof a.nonCurrentVersionTransition, 'object');
-                a.nonCurrentVersionTransition.forEach(t => {
-                    assert.strictEqual(typeof t.noncurrentDays, 'number');
-                    assert.strictEqual(typeof t.storageClass, 'string');
-                });
-            }
-            if (a.transition) {
-                assert.strictEqual(typeof a.transition, 'object');
-                a.transition.forEach(t => {
-                    if (t.days || t.days === 0) {
-                        assert.strictEqual(typeof t.days, 'number');
-                    }
-                    if (t.date !== undefined) {
-                        assert.strictEqual(typeof t.date, 'string');
-                    }
-                    assert.strictEqual(typeof t.storageClass, 'string');
-                });
-            }
        });
    });
}
@@ -1101,8 +763,7 @@
        }
        const Actions = actions.map(action => {
-            const { actionName, days, date, deleteMarker,
-                nonCurrentVersionTransition, transition } = action;
+            const { actionName, days, date, deleteMarker } = action;
            let Action;
            if (actionName === 'AbortIncompleteMultipartUpload') {
                Action = `<${actionName}><DaysAfterInitiation>${days}` +
@@ -1119,40 +780,6 @@
                Action = `<${actionName}>${Days}${Date}${DelMarker}` +
                    `</${actionName}>`;
            }
-            if (actionName === 'NoncurrentVersionTransition') {
-                const xml = [];
-                nonCurrentVersionTransition.forEach(transition => {
-                    const { noncurrentDays, storageClass } = transition;
-                    xml.push(
-                        `<${actionName}>`,
-                        `<NoncurrentDays>${noncurrentDays}` +
-                            '</NoncurrentDays>',
-                        `<StorageClass>${storageClass}</StorageClass>`,
-                        `</${actionName}>`,
-                    );
-                });
-                Action = xml.join('');
-            }
-            if (actionName === 'Transition') {
-                const xml = [];
-                transition.forEach(transition => {
-                    const { days, date, storageClass } = transition;
-                    let element;
-                    if (days !== undefined) {
-                        element = `<Days>${days}</Days>`;
-                    }
-                    if (date !== undefined) {
-                        element = `<Date>${date}</Date>`;
-                    }
-                    xml.push(
-                        `<${actionName}>`,
-                        element,
-                        `<StorageClass>${storageClass}</StorageClass>`,
-                        `</${actionName}>`,
-                    );
-                });
-                Action = xml.join('');
-            }
            return Action;
        }).join('');
        return `<Rule>${ID}${Status}${Filter}${Actions}</Rule>`;
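To illustrate the removed `_checkTimeGap` rule: two transitions to different storage classes must sit at least a day apart, or the configuration is rejected. A sketch with an invented rule fragment whose shape mirrors the parsed XML above:

```javascript
const rule = {
    Transition: [
        { StorageClass: ['azure-cold'], Date: ['2024-01-01T00:00:00Z'] },
        { StorageClass: ['aws-glacier'], Date: ['2024-01-01T12:00:00Z'] },
    ],
};

// Same comparison the removed helper performed for Date-based transitions.
const oneDay = 24 * 60 * 60 * 1000;
const [a, b] = rule.Transition;
const gap = Math.abs(new Date(a.Date[0]) - new Date(b.Date[0]));
console.log(gap < oneDay); // true -> this configuration would be rejected
```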
@@ -27,7 +27,7 @@ const errors = require('../errors');
 * </NotificationConfiguration>
 */

/**
 * Format of config:
 *
 * config = {
@@ -17,7 +17,7 @@ const errors = require('../errors');
 * </ObjectLockConfiguration>
 */

/**
 * Format of config:
 *
 * config = {
@ -1,5 +1,3 @@
const crypto = require('crypto');
const constants = require('../constants'); const constants = require('../constants');
const VersionIDUtils = require('../versioning/VersionID'); const VersionIDUtils = require('../versioning/VersionID');
@ -10,6 +8,7 @@ const ObjectMDLocation = require('./ObjectMDLocation');
* mpuPart metadata for example) * mpuPart metadata for example)
*/ */
class ObjectMD { class ObjectMD {
/** /**
* Create a new instance of ObjectMD. Parameter <tt>objMd</tt> is * Create a new instance of ObjectMD. Parameter <tt>objMd</tt> is
* reserved for internal use, users should call * reserved for internal use, users should call
@ -29,14 +28,9 @@ class ObjectMD {
} else { } else {
this._updateFromParsedJSON(objMd); this._updateFromParsedJSON(objMd);
} }
if (!this._data['creation-time']) {
this.setCreationTime(this.getLastModified());
}
} else { } else {
// set newly-created object md modified time to current time // set newly-created object md modified time to current time
const dt = new Date().toJSON(); this._data['last-modified'] = new Date().toJSON();
this.setLastModified(dt);
this.setCreationTime(dt);
} }
// set latest md model version now that we ensured // set latest md model version now that we ensured
// backward-compat conversion // backward-compat conversion
@ -91,8 +85,6 @@ class ObjectMD {
'content-length': 0, 'content-length': 0,
'content-type': '', 'content-type': '',
'content-md5': '', 'content-md5': '',
'content-language': '',
'creation-time': undefined,
// simple/no version. will expand once object versioning is // simple/no version. will expand once object versioning is
// introduced // introduced
'x-amz-version-id': 'null', 'x-amz-version-id': 'null',
@ -114,7 +106,6 @@ class ObjectMD {
}, },
'key': '', 'key': '',
'location': null, 'location': null,
'azureInfo': undefined,
// versionId, isNull, nullVersionId and isDeleteMarker // versionId, isNull, nullVersionId and isDeleteMarker
// should be undefined when not set explicitly // should be undefined when not set explicitly
'isNull': undefined, 'isNull': undefined,
@ -133,7 +124,6 @@ class ObjectMD {
role: '', role: '',
storageType: '', storageType: '',
dataStoreVersionId: '', dataStoreVersionId: '',
isNFS: null,
}, },
'dataStoreName': '', 'dataStoreName': '',
'originOp': '', 'originOp': '',
@ -148,7 +138,7 @@ class ObjectMD {
Object.assign(this._data, objMd._data); Object.assign(this._data, objMd._data);
Object.assign(this._data.replicationInfo, Object.assign(this._data.replicationInfo,
objMd._data.replicationInfo); objMd._data.replicationInfo);
} }
_updateFromParsedJSON(objMd) { _updateFromParsedJSON(objMd) {
@ -366,50 +356,6 @@ class ObjectMD {
return this._data['content-md5']; return this._data['content-md5'];
} }
/**
* Set content-language
*
* @param {string} contentLanguage - content-language
* @return {ObjectMD} itself
*/
setContentLanguage(contentLanguage) {
this._data['content-language'] = contentLanguage;
return this;
}
/**
* Returns content-language
*
* @return {string} content-language
*/
getContentLanguage() {
return this._data['content-language'];
}
/**
* Set Creation Date
*
* @param {string} creationTime - Creation Date
* @return {ObjectMD} itself
*/
setCreationTime(creationTime) {
this._data['creation-time'] = creationTime;
return this;
}
/**
* Returns Creation Date
*
* @return {string} Creation Date
*/
getCreationTime() {
// If creation-time is not set fallback to LastModified
if (!this._data['creation-time']) {
return this.getLastModified();
}
return this._data['creation-time'];
}
/** /**
* Set version id * Set version id
* *
@ -653,29 +599,6 @@ class ObjectMD {
return reducedLocations; return reducedLocations;
} }
/**
* Set the Azure specific information
* @param {object} azureInfo - a plain JS structure representing the
* Azure specific information for a Blob or a Container (see constructor
* of {@link ObjectMDAzureInfo} for a description of the fields of this
* structure
* @return {ObjectMD} itself
*/
setAzureInfo(azureInfo) {
this._data.azureInfo = azureInfo;
return this;
}
/**
* Get the Azure specific information
* @return {object} a plain JS structure representing the Azure specific
* information for a Blob or a Container an suitable for the constructor
* of {@link ObjectMDAzureInfo}.
*/
getAzureInfo() {
return this._data.azureInfo;
}
/** /**
* Set metadata isNull value * Set metadata isNull value
* *
@ -757,19 +680,6 @@ class ObjectMD {
return this._data.isDeleteMarker || false; return this._data.isDeleteMarker || false;
} }
/**
* Get if the object is a multipart upload (MPU)
*
* The function checks the "content-md5" field: if it contains a
* dash ('-') it is a MPU, as the content-md5 string ends with
* "-[nbparts]" for MPUs.
*
* @return {boolean} Whether object is a multipart upload
*/
isMultipartUpload() {
return this.getContentMd5().includes('-');
}
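
The removed helper relies on the S3 convention that a multipart upload's aggregate ETag is the MD5 of the part MD5s followed by `-<number of parts>`. A standalone sketch of the same check:

```javascript
// An aggregate MPU ETag such as "9b2cf535f27731c974343645a3985328-5"
// contains a dash; a plain single-part MD5 does not.
function isMultipartUpload(contentMd5) {
    return contentMd5.includes('-');
}

console.log(isMultipartUpload('9b2cf535f27731c974343645a3985328-5')); // true
console.log(isMultipartUpload('9b2cf535f27731c974343645a3985328'));   // false
```
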
/** /**
* Set metadata versionId value * Set metadata versionId value
* *
@ -797,10 +707,7 @@ class ObjectMD {
* @return {string|undefined} The encoded object versionId * @return {string|undefined} The encoded object versionId
*/ */
getEncodedVersionId() { getEncodedVersionId() {
if (this.getVersionId()) { return VersionIDUtils.encode(this.getVersionId());
return VersionIDUtils.encode(this.getVersionId());
}
return undefined;
} }
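
The guard being dropped here matters for never-versioned objects, whose `versionId` is `undefined`. A standalone sketch, with `encode` as a stand-in for Arsenal's version ID codec:

```javascript
// encode() stands in for VersionIDUtils.encode; the guard is the point.
const encode = versionId => Buffer.from(versionId).toString('hex');

function getEncodedVersionId(versionId) {
    if (versionId) {
        return encode(versionId);
    }
    return undefined; // never-versioned object: nothing to encode
}

console.log(getEncodedVersionId(undefined)); // undefined, no TypeError
```
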
/** /**
@ -843,20 +750,6 @@ class ObjectMD {
return this._data.tags; return this._data.tags;
} }
getUserMetadata() {
const metaHeaders = {};
const data = this.getValue();
Object.keys(data).forEach(key => {
if (key.startsWith('x-amz-meta-')) {
metaHeaders[key] = data[key];
}
});
if (Object.keys(metaHeaders).length > 0) {
return JSON.stringify(metaHeaders);
}
return undefined;
}
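
A self-contained sketch of the removed extraction, operating on a plain metadata object rather than an `ObjectMD` instance:

```javascript
// Collect user metadata headers ("x-amz-meta-*") and serialize them,
// returning undefined when there are none, as the removed method did.
function getUserMetadata(data) {
    const metaHeaders = {};
    Object.keys(data).forEach(key => {
        if (key.startsWith('x-amz-meta-')) {
            metaHeaders[key] = data[key];
        }
    });
    if (Object.keys(metaHeaders).length > 0) {
        return JSON.stringify(metaHeaders);
    }
    return undefined;
}

console.log(getUserMetadata({ 'x-amz-meta-color': 'blue', 'key': 'obj' }));
// '{"x-amz-meta-color":"blue"}'
```
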
/** /**
* Set replication information * Set replication information
* *
@ -865,7 +758,7 @@ class ObjectMD {
*/ */
setReplicationInfo(replicationInfo) { setReplicationInfo(replicationInfo) {
const { status, backends, content, destination, storageClass, role, const { status, backends, content, destination, storageClass, role,
storageType, dataStoreVersionId, isNFS } = replicationInfo; storageType, dataStoreVersionId } = replicationInfo;
this._data.replicationInfo = { this._data.replicationInfo = {
status, status,
backends, backends,
@ -875,7 +768,6 @@ class ObjectMD {
role, role,
storageType: storageType || '', storageType: storageType || '',
dataStoreVersionId: dataStoreVersionId || '', dataStoreVersionId: dataStoreVersionId || '',
isNFS: isNFS || null,
}; };
return this; return this;
} }
@ -894,24 +786,6 @@ class ObjectMD {
return this; return this;
} }
/**
* Set whether the replication is occurring from an NFS bucket.
* @param {Boolean} isNFS - Whether replication from an NFS bucket
* @return {ObjectMD} itself
*/
setReplicationIsNFS(isNFS) {
this._data.replicationInfo.isNFS = isNFS;
return this;
}
/**
* Get whether the replication is occurring from an NFS bucket.
* @return {Boolean} Whether replication from an NFS bucket
*/
getReplicationIsNFS() {
return this._data.replicationInfo.isNFS;
}
setReplicationSiteStatus(site, status) { setReplicationSiteStatus(site, status) {
const backend = this._data.replicationInfo.backends const backend = this._data.replicationInfo.backends
.find(o => o.site === site); .find(o => o.site === site);
@ -958,11 +832,6 @@ class ObjectMD {
return this; return this;
} }
setReplicationStorageType(storageType) {
this._data.replicationInfo.storageType = storageType;
return this;
}
setReplicationStorageClass(storageClass) { setReplicationStorageClass(storageClass) {
this._data.replicationInfo.storageClass = storageClass; this._data.replicationInfo.storageClass = storageClass;
return this; return this;
@ -1044,9 +913,6 @@ class ObjectMD {
Object.keys(metaHeaders).forEach(key => { Object.keys(metaHeaders).forEach(key => {
if (key.startsWith('x-amz-meta-')) { if (key.startsWith('x-amz-meta-')) {
this._data[key] = metaHeaders[key]; this._data[key] = metaHeaders[key];
} else if (key.startsWith('x-ms-meta-')) {
const _key = key.replace('x-ms-meta-', 'x-amz-meta-');
this._data[_key] = metaHeaders[key];
} }
}); });
// If a multipart object and the acl is already parsed, we update it // If a multipart object and the acl is already parsed, we update it
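
The dropped branch translated Azure-style metadata headers into their S3 equivalents before storage. A standalone sketch of that mapping:

```javascript
// Rewrite an Azure "x-ms-meta-" header key to its S3 "x-amz-meta-"
// equivalent; other keys pass through unchanged.
function normalizeMetaHeaderKey(key) {
    if (key.startsWith('x-ms-meta-')) {
        return key.replace('x-ms-meta-', 'x-amz-meta-');
    }
    return key;
}

console.log(normalizeMetaHeaderKey('x-ms-meta-owner')); // 'x-amz-meta-owner'
```
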
@ -1056,20 +922,6 @@ class ObjectMD {
return this; return this;
} }
/**
* Clear all existing meta headers (used for Azure)
*
* @return {ObjectMD} itself
*/
clearMetadataValues() {
Object.keys(this._data).forEach(key => {
if (key.startsWith('x-amz-meta')) {
delete this._data[key];
}
});
return this;
}
/** /**
* overrideMetadataValues (used for complete MPU and object copy) * overrideMetadataValues (used for complete MPU and object copy)
* *
@ -1081,39 +933,6 @@ class ObjectMD {
return this; return this;
} }
/**
* Create or update the microVersionId field
*
* This field can be used to force an update in MongoDB. This can
* be needed in the following cases:
*
* - in case no other metadata field changes
*
* - to detect a change when fields change but object version does
* not change e.g. when ingesting a putObjectTagging coming from
* S3C to Zenko
*
* - to manage conflicts during concurrent updates, using
* conditions on the microVersionId field.
*
* It's a field of 16 hexadecimal characters randomly generated
*
* @return {ObjectMD} itself
*/
updateMicroVersionId() {
this._data.microVersionId = crypto.randomBytes(8).toString('hex');
}
/**
* Get the microVersionId field, or null if not set
*
* @return {string|null} the microVersionId field if exists, or
* {null} if it does not exist
*/
getMicroVersionId() {
return this._data.microVersionId || null;
}
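
A minimal sketch of how the removed field was generated, matching the "16 hexadecimal characters" described in the doc comment:

```javascript
const crypto = require('crypto');

// 8 random bytes rendered as 16 hex characters, enough to force a
// MongoDB update even when no other metadata field changes.
const microVersionId = crypto.randomBytes(8).toString('hex');
console.log(microVersionId.length); // 16
```
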
/** /**
* Set object legal hold status * Set object legal hold status
* @param {boolean} legalHold - true if legal hold is 'ON' false if 'OFF' * @param {boolean} legalHold - true if legal hold is 'ON' false if 'OFF'


@ -1,162 +0,0 @@
/**
* Helper class to ease access to the Azure specific information for
* Blob and Container objects.
*/
class ObjectMDAzureInfo {
/**
* @constructor
* @param {object} obj - Raw structure for the Azure info on Blob/Container
* @param {string} obj.containerPublicAccess - Public access authorization
* type
* @param {object[]} obj.containerStoredAccessPolicies - Access policies
* for Shared Access Signature bearer
* @param {object} obj.containerImmutabilityPolicy - data immutability
* policy for this container
* @param {boolean} obj.containerLegalHoldStatus - legal hold status for
* this container
* @param {boolean} obj.containerDeletionInProgress - deletion in progress
* indicator for this container
* @param {string} obj.blobType - defines the type of blob for this object
* @param {string} obj.blobContentMD5 - whole object MD5 sum set by the
* client through the Azure API
* @param {string} obj.blobIssuedETag - backup of the issued ETag on MD only
* operations like Set Blob Properties and Set Blob Metadata
* @param {object} obj.blobCopyInfo - information pertaining to past and
* pending copy operation targeting this object
* @param {number} obj.blobSequenceNumber - sequence number for a PageBlob
* @param {Date} obj.blobAccessTierChangeTime - date of change of tier
* @param {boolean} obj.blobUncommitted - A block has been put for a
* nonexistent blob which is about to be created
*/
constructor(obj) {
this._data = {
containerPublicAccess: obj.containerPublicAccess,
containerStoredAccessPolicies: obj.containerStoredAccessPolicies,
containerImmutabilityPolicy: obj.containerImmutabilityPolicy,
containerLegalHoldStatus: obj.containerLegalHoldStatus,
containerDeletionInProgress: obj.containerDeletionInProgress,
blobType: obj.blobType,
blobContentMD5: obj.blobContentMD5,
blobIssuedETag: obj.blobIssuedETag,
blobCopyInfo: obj.blobCopyInfo,
blobSequenceNumber: obj.blobSequenceNumber,
blobAccessTierChangeTime: obj.blobAccessTierChangeTime,
blobUncommitted: obj.blobUncommitted,
};
}
getContainerPublicAccess() {
return this._data.containerPublicAccess;
}
setContainerPublicAccess(containerPublicAccess) {
this._data.containerPublicAccess = containerPublicAccess;
return this;
}
getContainerStoredAccessPolicies() {
return this._data.containerStoredAccessPolicies;
}
setContainerStoredAccessPolicies(containerStoredAccessPolicies) {
this._data.containerStoredAccessPolicies =
containerStoredAccessPolicies;
return this;
}
getContainerImmutabilityPolicy() {
return this._data.containerImmutabilityPolicy;
}
setContainerImmutabilityPolicy(containerImmutabilityPolicy) {
this._data.containerImmutabilityPolicy = containerImmutabilityPolicy;
return this;
}
getContainerLegalHoldStatus() {
return this._data.containerLegalHoldStatus;
}
setContainerLegalHoldStatus(containerLegalHoldStatus) {
this._data.containerLegalHoldStatus = containerLegalHoldStatus;
return this;
}
getContainerDeletionInProgress() {
return this._data.containerDeletionInProgress;
}
setContainerDeletionInProgress(containerDeletionInProgress) {
this._data.containerDeletionInProgress = containerDeletionInProgress;
return this;
}
getBlobType() {
return this._data.blobType;
}
setBlobType(blobType) {
this._data.blobType = blobType;
return this;
}
getBlobContentMD5() {
return this._data.blobContentMD5;
}
setBlobContentMD5(blobContentMD5) {
this._data.blobContentMD5 = blobContentMD5;
return this;
}
getBlobIssuedETag() {
return this._data.blobIssuedETag;
}
setBlobIssuedETag(blobIssuedETag) {
this._data.blobIssuedETag = blobIssuedETag;
return this;
}
getBlobCopyInfo() {
return this._data.blobCopyInfo;
}
setBlobCopyInfo(blobCopyInfo) {
this._data.blobCopyInfo = blobCopyInfo;
return this;
}
getBlobSequenceNumber() {
return this._data.blobSequenceNumber;
}
setBlobSequenceNumber(blobSequenceNumber) {
this._data.blobSequenceNumber = blobSequenceNumber;
return this;
}
getBlobAccessTierChangeTime() {
return this._data.blobAccessTierChangeTime;
}
setBlobAccessTierChangeTime(blobAccessTierChangeTime) {
this._data.blobAccessTierChangeTime = blobAccessTierChangeTime;
return this;
}
getBlobUncommitted() {
return this._data.blobUncommitted;
}
setBlobUncommitted(blobUncommitted) {
this._data.blobUncommitted = blobUncommitted;
return this;
}
getValue() {
return this._data;
}
}
module.exports = ObjectMDAzureInfo;
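
For context, a hypothetical round-trip with the deleted helper class; the field names come from its constructor above, and the values are made up:

```javascript
// Assumes ObjectMDAzureInfo is still importable, e.g. from a checkout
// that predates this change.
const info = new ObjectMDAzureInfo({
    blobType: 'BlockBlob',
    blobContentMD5: '9b2cf535f27731c974343645a3985328',
    blobSequenceNumber: 0,
});
info.setBlobSequenceNumber(3); // setters return `this`, so calls chain
console.log(info.getValue().blobSequenceNumber); // 3
```
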


@ -3,6 +3,7 @@
* 'location' array * 'location' array
*/ */
class ObjectMDLocation { class ObjectMDLocation {
/** /**
* @constructor * @constructor
* @param {object} locationObj - single data location info * @param {object} locationObj - single data location info
@ -13,14 +14,10 @@ class ObjectMDLocation {
* @param {string} locationObj.dataStoreName - type of data store * @param {string} locationObj.dataStoreName - type of data store
* @param {string} locationObj.dataStoreETag - internal ETag of * @param {string} locationObj.dataStoreETag - internal ETag of
* data part * data part
* @param {string} [locationObj.dataStoreVersionId] - versionId,
* needed for cloud backends
* @param {number} [location.cryptoScheme] - if location data is * @param {number} [location.cryptoScheme] - if location data is
* encrypted: the encryption scheme version * encrypted: the encryption scheme version
* @param {string} [location.cipheredDataKey] - if location data * @param {string} [location.cipheredDataKey] - if location data
* is encrypted: the base64-encoded ciphered data key * is encrypted: the base64-encoded ciphered data key
* @param {string} [locationObj.blockId] - blockId of the part,
* set by the Azure Blob Service REST API frontend
*/ */
constructor(locationObj) { constructor(locationObj) {
this._data = { this._data = {
@ -29,8 +26,6 @@ class ObjectMDLocation {
size: locationObj.size, size: locationObj.size,
dataStoreName: locationObj.dataStoreName, dataStoreName: locationObj.dataStoreName,
dataStoreETag: locationObj.dataStoreETag, dataStoreETag: locationObj.dataStoreETag,
dataStoreVersionId: locationObj.dataStoreVersionId,
blockId: locationObj.blockId,
}; };
if (locationObj.cryptoScheme) { if (locationObj.cryptoScheme) {
this._data.cryptoScheme = locationObj.cryptoScheme; this._data.cryptoScheme = locationObj.cryptoScheme;
@ -52,7 +47,6 @@ class ObjectMDLocation {
* @param {object} location - single data location info * @param {object} location - single data location info
* @param {string} location.key - data backend key * @param {string} location.key - data backend key
* @param {string} location.dataStoreName - type of data store * @param {string} location.dataStoreName - type of data store
* @param {string} [location.dataStoreVersionId] - data backend version ID
* @param {number} [location.cryptoScheme] - if location data is * @param {number} [location.cryptoScheme] - if location data is
* encrypted: the encryption scheme version * encrypted: the encryption scheme version
* @param {string} [location.cipheredDataKey] - if location data * @param {string} [location.cipheredDataKey] - if location data
@ -63,7 +57,6 @@ class ObjectMDLocation {
[ [
'key', 'key',
'dataStoreName', 'dataStoreName',
'dataStoreVersionId',
'cryptoScheme', 'cryptoScheme',
'cipheredDataKey', 'cipheredDataKey',
].forEach(attrName => { ].forEach(attrName => {
@ -80,10 +73,6 @@ class ObjectMDLocation {
return this._data.dataStoreETag; return this._data.dataStoreETag;
} }
getDataStoreVersionId() {
return this._data.dataStoreVersionId;
}
getPartNumber() { getPartNumber() {
return Number.parseInt(this._data.dataStoreETag.split(':')[0], 10); return Number.parseInt(this._data.dataStoreETag.split(':')[0], 10);
} }
@ -118,15 +107,6 @@ class ObjectMDLocation {
return this._data.cipheredDataKey; return this._data.cipheredDataKey;
} }
getBlockId() {
return this._data.blockId;
}
setBlockId(blockId) {
this._data.blockId = blockId;
return this;
}
getValue() { getValue() {
return this._data; return this._data;
} }
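
The part-number parsing kept on both sides of this diff assumes the `dataStoreETag` format `<partNumber>:<etag>`. A standalone sketch with an illustrative location record:

```javascript
// Illustrative location record; the removed fields (dataStoreVersionId,
// blockId) carried cloud-backend and Azure specifics.
const location = {
    key: 'a1b2c3d4',
    start: 0,
    size: 1048576,
    dataStoreName: 'us-east-1',
    dataStoreETag: '2:9b2cf535f27731c974343645a3985328',
};

// getPartNumber() parses the leading part index out of dataStoreETag:
const partNumber = Number.parseInt(location.dataStoreETag.split(':')[0], 10);
console.log(partNumber); // 2
```
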


@ -59,7 +59,6 @@ class ReplicationConfiguration {
this._rules = null; this._rules = null;
this._prevStorageClass = null; this._prevStorageClass = null;
this._hasScalityDestination = null; this._hasScalityDestination = null;
this._preferredReadLocation = null;
} }
/** /**
@ -86,18 +85,6 @@ class ReplicationConfiguration {
return this._rules; return this._rules;
} }
/**
* The preferred read location
* @return {string|null} - The preferred read location if defined,
* otherwise null
*
* FIXME ideally we should be able to specify one preferred read
* location for each rule
*/
getPreferredReadLocation() {
return this._preferredReadLocation;
}
/** /**
* Get the replication configuration * Get the replication configuration
* @return {object} - The replication configuration * @return {object} - The replication configuration
@ -107,7 +94,6 @@ class ReplicationConfiguration {
role: this.getRole(), role: this.getRole(),
destination: this.getDestination(), destination: this.getDestination(),
rules: this.getRules(), rules: this.getRules(),
preferredReadLocation: this.getPreferredReadLocation(),
}; };
} }
@ -306,14 +292,6 @@ class ReplicationConfiguration {
return undefined; return undefined;
} }
const storageClasses = destination.StorageClass[0].split(','); const storageClasses = destination.StorageClass[0].split(',');
const prefReadIndex = storageClasses.findIndex(storageClass =>
storageClass.endsWith(':preferred_read'));
if (prefReadIndex !== -1) {
const prefRead = storageClasses[prefReadIndex].split(':')[0];
// remove :preferred_read tag from storage class name
storageClasses[prefReadIndex] = prefRead;
this._preferredReadLocation = prefRead;
}
const isValidStorageClass = storageClasses.every(storageClass => { const isValidStorageClass = storageClasses.every(storageClass => {
if (validStorageClasses.includes(storageClass)) { if (validStorageClasses.includes(storageClass)) {
this._hasScalityDestination = this._hasScalityDestination =
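
The removed block implemented a small naming convention: a storage class suffixed with `:preferred_read` is untagged and remembered as the preferred read location. A standalone sketch:

```javascript
// Returns the preferred read location, if any, and strips the tag from
// the storage class list in place, as the removed code did.
function extractPreferredRead(storageClasses) {
    const idx = storageClasses.findIndex(
        storageClass => storageClass.endsWith(':preferred_read'));
    if (idx === -1) {
        return null;
    }
    const prefRead = storageClasses[idx].split(':')[0];
    storageClasses[idx] = prefRead;
    return prefRead;
}

const classes = 'aws-eu:preferred_read,gcp-us'.split(',');
console.log(extractPreferredRead(classes)); // 'aws-eu'
console.log(classes);                       // [ 'aws-eu', 'gcp-us' ]
```
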


@ -111,7 +111,7 @@ class RoundRobin {
pickHost() { pickHost() {
if (this.logger) { if (this.logger) {
this.logger.debug('pick host', this.logger.debug('pick host',
{ host: this.getCurrentHost() }); { host: this.getCurrentHost() });
} }
const curHost = this.getCurrentHost(); const curHost = this.getCurrentHost();
++this.pickCount; ++this.pickCount;
@ -163,7 +163,7 @@ class RoundRobin {
} }
if (this.logger) { if (this.logger) {
this.logger.debug('round robin host', this.logger.debug('round robin host',
{ newHost: this.getCurrentHost() }); { newHost: this.getCurrentHost() });
} }
} }
} }


@ -10,6 +10,7 @@ const { checkSupportIPv6 } = require('./utils');
class Server { class Server {
/** /**
* @constructor * @constructor
* *
@ -368,8 +369,6 @@ class Server {
error: err.stack || err, error: err.stack || err,
address: sock.address(), address: sock.address(),
}); });
// socket is not systematically destroyed
sock.destroy();
} }
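
The removed comment states the intent: Node does not destroy the socket on every client-error path, so the handler did it explicitly. A minimal sketch of that pattern on a bare `http` server, not the Arsenal Server class:

```javascript
const http = require('http');

const server = http.createServer((req, res) => res.end('ok'));
server.on('clientError', (err, sock) => {
    console.error('client error', err.message);
    sock.destroy(); // the removed line made this explicit
});
server.listen(0);
```
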
/** /**
@ -430,16 +429,16 @@ class Server {
// Setting no delay of the socket to the value configured // Setting no delay of the socket to the value configured
sock.setNoDelay(this.isNoDelay()); sock.setNoDelay(this.isNoDelay());
sock.on('error', err => this._logger.info( sock.on('error', err => this._logger.info(
'socket error - request rejected', { error: err })); 'socket error - request rejected', { error: err }));
}); });
this._server.on('tlsClientError', (err, sock) => this._server.on('tlsClientError', (err, sock) =>
this._onClientError(err, sock)); this._onClientError(err, sock));
this._server.on('clientError', (err, sock) => this._server.on('clientError', (err, sock) =>
this._onClientError(err, sock)); this._onClientError(err, sock));
this._server.on('checkContinue', (req, res) => this._server.on('checkContinue', (req, res) =>
this._onCheckContinue(req, res)); this._onCheckContinue(req, res));
this._server.on('checkExpectation', (req, res) => this._server.on('checkExpectation', (req, res) =>
this._onCheckExpectation(req, res)); this._onCheckExpectation(req, res));
this._server.on('listening', () => this._onListening()); this._server.on('listening', () => this._onListening());
} }
this._server.listen(this._port, this._address); this._server.listen(this._port, this._address);


@ -72,8 +72,8 @@ function getByteRangeFromSpec(rangeSpec, objectSize) {
if (rangeSpec.start < objectSize) { if (rangeSpec.start < objectSize) {
// test is false if end is undefined // test is false if end is undefined
return { range: [rangeSpec.start, return { range: [rangeSpec.start,
(rangeSpec.end < objectSize ? (rangeSpec.end < objectSize ?
rangeSpec.end : objectSize - 1)] }; rangeSpec.end : objectSize - 1)] };
} }
return { error: errors.InvalidRange }; return { error: errors.InvalidRange };
} }
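
A standalone sketch of the clamping rule above: a range start inside the object is honored, and an end at or past the last byte (or an undefined end, for which the comparison is false) is clamped to `objectSize - 1`:

```javascript
// Returns a [start, end] pair, or null for an unsatisfiable range
// (null plays the role of errors.InvalidRange here).
function clampByteRange(start, end, objectSize) {
    if (start < objectSize) {
        return [start, end < objectSize ? end : objectSize - 1];
    }
    return null;
}

console.log(clampByteRange(0, 9999, 100));  // [ 0, 99 ]
console.log(clampByteRange(150, 200, 100)); // null
```
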


@ -95,8 +95,8 @@ function _negotiateProtocolVersion(client, logger, cb) {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.error('KMIP::negotiateProtocolVersion', logger.error('KMIP::negotiateProtocolVersion',
{ error, { error,
vendorIdentification: client.vendorIdentification }); vendorIdentification: client.vendorIdentification });
return cb(error); return cb(error);
} }
const majorVersions = const majorVersions =
@ -107,8 +107,8 @@ function _negotiateProtocolVersion(client, logger, cb) {
majorVersions.length !== minorVersions.length) { majorVersions.length !== minorVersions.length) {
const error = _arsenalError('No suitable protocol version'); const error = _arsenalError('No suitable protocol version');
logger.error('KMIP::negotiateProtocolVersion', logger.error('KMIP::negotiateProtocolVersion',
{ error, { error,
vendorIdentification: client.vendorIdentification }); vendorIdentification: client.vendorIdentification });
return cb(error); return cb(error);
} }
client.kmip.changeProtocolVersion(majorVersions[0], minorVersions[0]); client.kmip.changeProtocolVersion(majorVersions[0], minorVersions[0]);
@ -131,8 +131,8 @@ function _mapExtensions(client, logger, cb) {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.error('KMIP::mapExtensions', logger.error('KMIP::mapExtensions',
{ error, { error,
vendorIdentification: client.vendorIdentification }); vendorIdentification: client.vendorIdentification });
return cb(error); return cb(error);
} }
const extensionNames = response.lookup(searchFilter.extensionName); const extensionNames = response.lookup(searchFilter.extensionName);
@ -140,8 +140,8 @@ function _mapExtensions(client, logger, cb) {
if (extensionNames.length !== extensionTags.length) { if (extensionNames.length !== extensionTags.length) {
const error = _arsenalError('Inconsistent extension list'); const error = _arsenalError('Inconsistent extension list');
logger.error('KMIP::mapExtensions', logger.error('KMIP::mapExtensions',
{ error, { error,
vendorIdentification: client.vendorIdentification }); vendorIdentification: client.vendorIdentification });
return cb(error); return cb(error);
} }
extensionNames.forEach((extensionName, idx) => { extensionNames.forEach((extensionName, idx) => {
@ -165,7 +165,7 @@ function _queryServerInformation(client, logger, cb) {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.warn('KMIP::queryServerInformation', logger.warn('KMIP::queryServerInformation',
{ error }); { error });
/* no error returned, caller can keep going */ /* no error returned, caller can keep going */
return cb(); return cb();
} }
@ -175,9 +175,9 @@ function _queryServerInformation(client, logger, cb) {
JSON.stringify(response.lookup(searchFilter.serverInformation)[0])); JSON.stringify(response.lookup(searchFilter.serverInformation)[0]));
logger.info('KMIP Server identified', logger.info('KMIP Server identified',
{ vendorIdentification: client.vendorIdentification, { vendorIdentification: client.vendorIdentification,
serverInformation: client.serverInformation, serverInformation: client.serverInformation,
negotiatedProtocolVersion: client.kmip.protocolVersion }); negotiatedProtocolVersion: client.kmip.protocolVersion });
return cb(); return cb();
}); });
} }
@ -201,8 +201,8 @@ function _queryOperationsAndObjects(client, logger, cb) {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.error('KMIP::queryOperationsAndObjects', logger.error('KMIP::queryOperationsAndObjects',
{ error, { error,
vendorIdentification: client.vendorIdentification }); vendorIdentification: client.vendorIdentification });
return cb(error); return cb(error);
} }
const supportedOperations = response.lookup(searchFilter.operation); const supportedOperations = response.lookup(searchFilter.operation);
@ -227,15 +227,15 @@ function _queryOperationsAndObjects(client, logger, cb) {
logger.warn('KMIP::queryOperationsAndObjects: ' + logger.warn('KMIP::queryOperationsAndObjects: ' +
'The KMIP Server announces that it ' + 'The KMIP Server announces that it ' +
'does not support all of the required features', 'does not support all of the required features',
{ vendorIdentification: client.vendorIdentification, { vendorIdentification: client.vendorIdentification,
serverInformation: client.serverInformation, serverInformation: client.serverInformation,
supportsEncrypt, supportsDecrypt, supportsEncrypt, supportsDecrypt,
supportsActivate, supportsRevoke, supportsActivate, supportsRevoke,
supportsCreate, supportsDestroy, supportsCreate, supportsDestroy,
supportsQuery, supportsSymmetricKeys }); supportsQuery, supportsSymmetricKeys });
} else { } else {
logger.info('KMIP Server provides the necessary feature set', logger.info('KMIP Server provides the necessary feature set',
{ vendorIdentification: client.vendorIdentification }); { vendorIdentification: client.vendorIdentification });
} }
return cb(); return cb();
}); });
@ -269,8 +269,8 @@ class Client {
this.vendorIdentification = ''; this.vendorIdentification = '';
this.serverInformation = []; this.serverInformation = [];
this.kmip = new KMIP(CodecClass || TTLVCodec, this.kmip = new KMIP(CodecClass || TTLVCodec,
TransportClass || TlsTransport, TransportClass || TlsTransport,
options); options);
this.kmip.registerHandshakeFunction((logger, cb) => { this.kmip.registerHandshakeFunction((logger, cb) => {
this._kmipHandshake(logger, cb); this._kmipHandshake(logger, cb);
}); });
@ -327,8 +327,8 @@ class Client {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.error('KMIP::_activateBucketKey', logger.error('KMIP::_activateBucketKey',
{ error, { error,
serverInformation: this.serverInformation }); serverInformation: this.serverInformation });
return cb(error); return cb(error);
} }
const uniqueIdentifier = const uniqueIdentifier =
@ -337,7 +337,7 @@ class Client {
const error = _arsenalError( const error = _arsenalError(
'Server did not return the expected identifier'); 'Server did not return the expected identifier');
logger.error('KMIP::cipherDataKey', logger.error('KMIP::cipherDataKey',
{ error, uniqueIdentifier }); { error, uniqueIdentifier });
return cb(error); return cb(error);
} }
return cb(null, keyIdentifier); return cb(null, keyIdentifier);
@ -356,20 +356,20 @@ class Client {
const attributes = []; const attributes = [];
if (!!this.options.bucketNameAttributeName) { if (!!this.options.bucketNameAttributeName) {
attributes.push(KMIP.Attribute('TextString', attributes.push(KMIP.Attribute('TextString',
this.options.bucketNameAttributeName, this.options.bucketNameAttributeName,
bucketName)); bucketName));
} }
attributes.push(...[ attributes.push(...[
KMIP.Attribute('Enumeration', 'Cryptographic Algorithm', KMIP.Attribute('Enumeration', 'Cryptographic Algorithm',
CRYPTOGRAPHIC_ALGORITHM), CRYPTOGRAPHIC_ALGORITHM),
KMIP.Attribute('Integer', 'Cryptographic Length', KMIP.Attribute('Integer', 'Cryptographic Length',
CRYPTOGRAPHIC_LENGTH), CRYPTOGRAPHIC_LENGTH),
KMIP.Attribute('Integer', 'Cryptographic Usage Mask', KMIP.Attribute('Integer', 'Cryptographic Usage Mask',
this.kmip.encodeMask('Cryptographic Usage Mask', this.kmip.encodeMask('Cryptographic Usage Mask',
CRYPTOGRAPHIC_USAGE_MASK))]); CRYPTOGRAPHIC_USAGE_MASK))]);
if (this.options.compoundCreateActivate) { if (this.options.compoundCreateActivate) {
attributes.push(KMIP.Attribute('Date-Time', 'Activation Date', attributes.push(KMIP.Attribute('Date-Time', 'Activation Date',
new Date(Date.UTC()))); new Date(Date.UTC())));
} }
return this.kmip.request(logger, 'Create', [ return this.kmip.request(logger, 'Create', [
@ -379,8 +379,8 @@ class Client {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.error('KMIP::createBucketKey', logger.error('KMIP::createBucketKey',
{ error, { error,
serverInformation: this.serverInformation }); serverInformation: this.serverInformation });
return cb(error); return cb(error);
} }
const createdObjectType = const createdObjectType =
@ -391,7 +391,7 @@ class Client {
const error = _arsenalError( const error = _arsenalError(
'Server created an object of wrong type'); 'Server created an object of wrong type');
logger.error('KMIP::createBucketKey', logger.error('KMIP::createBucketKey',
{ error, createdObjectType }); { error, createdObjectType });
return cb(error); return cb(error);
} }
if (!this.options.compoundCreateActivate) { if (!this.options.compoundCreateActivate) {
@ -416,16 +416,16 @@ class Client {
KMIP.TextString('Unique Identifier', bucketKeyId), KMIP.TextString('Unique Identifier', bucketKeyId),
KMIP.Structure('Revocation Reason', [ KMIP.Structure('Revocation Reason', [
KMIP.Enumeration('Revocation Reason Code', KMIP.Enumeration('Revocation Reason Code',
'Cessation of Operation'), 'Cessation of Operation'),
KMIP.TextString('Revocation Message', KMIP.TextString('Revocation Message',
'About to be deleted'), 'About to be deleted'),
]), ]),
], (err, response) => { ], (err, response) => {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.error('KMIP::_revokeBucketKey', logger.error('KMIP::_revokeBucketKey',
{ error, { error,
serverInformation: this.serverInformation }); serverInformation: this.serverInformation });
return cb(error); return cb(error);
} }
const uniqueIdentifier = const uniqueIdentifier =
@ -434,7 +434,7 @@ class Client {
const error = _arsenalError( const error = _arsenalError(
'Server did not return the expected identifier'); 'Server did not return the expected identifier');
logger.error('KMIP::_revokeBucketKey', logger.error('KMIP::_revokeBucketKey',
{ error, uniqueIdentifier }); { error, uniqueIdentifier });
return cb(error); return cb(error);
} }
return cb(); return cb();
@ -453,8 +453,8 @@ class Client {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.error('KMIP::destroyBucketKey: revocation failed', logger.error('KMIP::destroyBucketKey: revocation failed',
{ error, { error,
serverInformation: this.serverInformation }); serverInformation: this.serverInformation });
return cb(error); return cb(error);
} }
return this.kmip.request(logger, 'Destroy', [ return this.kmip.request(logger, 'Destroy', [
@ -463,8 +463,8 @@ class Client {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.error('KMIP::destroyBucketKey', logger.error('KMIP::destroyBucketKey',
{ error, { error,
serverInformation: this.serverInformation }); serverInformation: this.serverInformation });
return cb(error); return cb(error);
} }
const uniqueIdentifier = const uniqueIdentifier =
@ -473,7 +473,7 @@ class Client {
const error = _arsenalError( const error = _arsenalError(
'Server did not return the expected identifier'); 'Server did not return the expected identifier');
logger.error('KMIP::destroyBucketKey', logger.error('KMIP::destroyBucketKey',
{ error, uniqueIdentifier }); { error, uniqueIdentifier });
return cb(error); return cb(error);
} }
return cb(); return cb();
@ -492,19 +492,19 @@ class Client {
* @callback called with (err, cipheredDataKey: Buffer) * @callback called with (err, cipheredDataKey: Buffer)
*/ */
cipherDataKey(cryptoScheme, cipherDataKey(cryptoScheme,
masterKeyId, masterKeyId,
plainTextDataKey, plainTextDataKey,
logger, logger,
cb) { cb) {
return this.kmip.request(logger, 'Encrypt', [ return this.kmip.request(logger, 'Encrypt', [
KMIP.TextString('Unique Identifier', masterKeyId), KMIP.TextString('Unique Identifier', masterKeyId),
KMIP.Structure('Cryptographic Parameters', [ KMIP.Structure('Cryptographic Parameters', [
KMIP.Enumeration('Block Cipher Mode', KMIP.Enumeration('Block Cipher Mode',
CRYPTOGRAPHIC_CIPHER_MODE), CRYPTOGRAPHIC_CIPHER_MODE),
KMIP.Enumeration('Padding Method', KMIP.Enumeration('Padding Method',
CRYPTOGRAPHIC_PADDING_METHOD), CRYPTOGRAPHIC_PADDING_METHOD),
KMIP.Enumeration('Cryptographic Algorithm', KMIP.Enumeration('Cryptographic Algorithm',
CRYPTOGRAPHIC_ALGORITHM), CRYPTOGRAPHIC_ALGORITHM),
]), ]),
KMIP.ByteString('Data', plainTextDataKey), KMIP.ByteString('Data', plainTextDataKey),
KMIP.ByteString('IV/Counter/Nonce', CRYPTOGRAPHIC_DEFAULT_IV), KMIP.ByteString('IV/Counter/Nonce', CRYPTOGRAPHIC_DEFAULT_IV),
@ -512,8 +512,8 @@ class Client {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.error('KMIP::cipherDataKey', logger.error('KMIP::cipherDataKey',
{ error, { error,
serverInformation: this.serverInformation }); serverInformation: this.serverInformation });
return cb(error); return cb(error);
} }
const uniqueIdentifier = const uniqueIdentifier =
@ -523,7 +523,7 @@ class Client {
const error = _arsenalError( const error = _arsenalError(
'Server did not return the expected identifier'); 'Server did not return the expected identifier');
logger.error('KMIP::cipherDataKey', logger.error('KMIP::cipherDataKey',
{ error, uniqueIdentifier }); { error, uniqueIdentifier });
return cb(error); return cb(error);
} }
return cb(null, data); return cb(null, data);
@ -541,19 +541,19 @@ class Client {
* @callback called with (err, plainTextDataKey: Buffer) * @callback called with (err, plainTextDataKey: Buffer)
*/ */
decipherDataKey(cryptoScheme, decipherDataKey(cryptoScheme,
masterKeyId, masterKeyId,
cipheredDataKey, cipheredDataKey,
logger, logger,
cb) { cb) {
return this.kmip.request(logger, 'Decrypt', [ return this.kmip.request(logger, 'Decrypt', [
KMIP.TextString('Unique Identifier', masterKeyId), KMIP.TextString('Unique Identifier', masterKeyId),
KMIP.Structure('Cryptographic Parameters', [ KMIP.Structure('Cryptographic Parameters', [
KMIP.Enumeration('Block Cipher Mode', KMIP.Enumeration('Block Cipher Mode',
CRYPTOGRAPHIC_CIPHER_MODE), CRYPTOGRAPHIC_CIPHER_MODE),
KMIP.Enumeration('Padding Method', KMIP.Enumeration('Padding Method',
CRYPTOGRAPHIC_PADDING_METHOD), CRYPTOGRAPHIC_PADDING_METHOD),
KMIP.Enumeration('Cryptographic Algorithm', KMIP.Enumeration('Cryptographic Algorithm',
CRYPTOGRAPHIC_ALGORITHM), CRYPTOGRAPHIC_ALGORITHM),
]), ]),
KMIP.ByteString('Data', cipheredDataKey), KMIP.ByteString('Data', cipheredDataKey),
KMIP.ByteString('IV/Counter/Nonce', CRYPTOGRAPHIC_DEFAULT_IV), KMIP.ByteString('IV/Counter/Nonce', CRYPTOGRAPHIC_DEFAULT_IV),
@ -561,8 +561,8 @@ class Client {
if (err) { if (err) {
const error = _arsenalError(err); const error = _arsenalError(err);
logger.error('KMIP::decipherDataKey', logger.error('KMIP::decipherDataKey',
{ error, { error,
serverInformation: this.serverInformation }); serverInformation: this.serverInformation });
return cb(error); return cb(error);
} }
const uniqueIdentifier = const uniqueIdentifier =
@ -572,7 +572,7 @@ class Client {
const error = _arsenalError( const error = _arsenalError(
'Server did not return the right identifier'); 'Server did not return the right identifier');
logger.error('KMIP::decipherDataKey', logger.error('KMIP::decipherDataKey',
{ error, uniqueIdentifier }); { error, uniqueIdentifier });
return cb(error); return cb(error);
} }
return cb(null, data); return cb(null, data);
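
Per the JSDoc in these hunks, both methods share the call shape `(cryptoScheme, masterKeyId, <key buffer>, logger, cb)`. A hypothetical round-trip, with a stub standing in for a real KMIP client so the sketch runs without a KMIP server:

```javascript
// The stub only mimics the documented signatures; it is not the real client.
const logger = console;
const client = {
    cipherDataKey: (scheme, keyId, data, log, cb) =>
        cb(null, Buffer.from(data).reverse()),
    decipherDataKey: (scheme, keyId, data, log, cb) =>
        cb(null, Buffer.from(data).reverse()),
};

client.cipherDataKey(1, 'key-1', Buffer.from('secret'), logger, (err, ciphered) => {
    client.decipherDataKey(1, 'key-1', ciphered, logger, (err2, plain) => {
        console.log(plain.toString()); // 'secret'
    });
});
```
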


@ -55,15 +55,15 @@ function TTLVCodec() {
const property = {}; const property = {};
if (!TypeDecoder[elementType]) { if (!TypeDecoder[elementType]) {
_throwError(logger, _throwError(logger,
'Unknown element type', 'Unknown element type',
{ funcName, elementTag, elementType }); { funcName, elementTag, elementType });
} }
const elementValue = value.slice(i + 8, const elementValue = value.slice(i + 8,
i + 8 + elementLength); i + 8 + elementLength);
if (elementValue.length !== elementLength) { if (elementValue.length !== elementLength) {
_throwError(logger, 'BUG: Wrong buffer size', _throwError(logger, 'BUG: Wrong buffer size',
{ funcName, elementLength, { funcName, elementLength,
bufferLength: elementValue.length }); bufferLength: elementValue.length });
} }
property.type = TypeDecoder[elementType].name; property.type = TypeDecoder[elementType].name;
property.value = TypeDecoder[elementType] property.value = TypeDecoder[elementType]
@ -75,7 +75,7 @@ function TTLVCodec() {
const tagInfo = TagDecoder[elementTag]; const tagInfo = TagDecoder[elementTag];
if (!tagInfo) { if (!tagInfo) {
logger.debug('Unknown element tag', logger.debug('Unknown element tag',
{ funcName, elementTag }); { funcName, elementTag });
property.tag = elementTag; property.tag = elementTag;
element['Unknown Tag'] = property; element['Unknown Tag'] = property;
} else { } else {
@ -83,8 +83,8 @@ function TTLVCodec() {
if (tagInfo.name === 'Attribute Name') { if (tagInfo.name === 'Attribute Name') {
if (property.type !== 'TextString') { if (property.type !== 'TextString') {
_throwError(logger, _throwError(logger,
'Invalide type', 'Invalide type',
{ funcName, type: property.type }); { funcName, type: property.type });
} }
diversion = property.value; diversion = property.value;
} }
@ -114,8 +114,8 @@ function TTLVCodec() {
} }
const itemResult = const itemResult =
TypeEncoder[itemType].encode(itemTagName, TypeEncoder[itemType].encode(itemTagName,
itemValue, itemValue,
itemDiversion); itemDiversion);
encodedValue = encodedValue encodedValue = encodedValue
.concat(_ttlvPadVector(itemResult)); .concat(_ttlvPadVector(itemResult));
}); });
@ -133,9 +133,9 @@ function TTLVCodec() {
const fixedLength = 4; const fixedLength = 4;
if (fixedLength !== value.length) { if (fixedLength !== value.length) {
_throwError(logger, _throwError(logger,
'Length mismatch', 'Length mismatch',
{ funcName, fixedLength, { funcName, fixedLength,
bufferLength: value.length }); bufferLength: value.length });
} }
return value.readUInt32BE(0); return value.readUInt32BE(0);
}, },
@ -156,16 +156,16 @@ function TTLVCodec() {
const fixedLength = 8; const fixedLength = 8;
if (fixedLength !== value.length) { if (fixedLength !== value.length) {
_throwError(logger, _throwError(logger,
'Length mismatch', 'Length mismatch',
{ funcName, fixedLength, { funcName, fixedLength,
bufferLength: value.length }); bufferLength: value.length });
} }
const longUInt = UINT32_MAX * value.readUInt32BE(0) + const longUInt = UINT32_MAX * value.readUInt32BE(0) +
value.readUInt32BE(4); value.readUInt32BE(4);
if (longUInt > Number.MAX_SAFE_INTEGER) { if (longUInt > Number.MAX_SAFE_INTEGER) {
_throwError(logger, _throwError(logger,
'53-bit overflow', '53-bit overflow',
{ funcName, longUInt }); { funcName, longUInt });
} }
return longUInt; return longUInt;
}, },
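
The overflow check in this hunk exists because a KMIP LongInteger is 64 bits while JavaScript numbers are IEEE 754 doubles, exact only up to 2^53 - 1. A standalone sketch of the decode:

```javascript
const UINT32_MAX = 2 ** 32;

// Decode a big-endian 64-bit unsigned integer from two 32-bit reads,
// rejecting values that cannot be represented exactly in a double.
function decodeLongUInt(buf) {
    const longUInt = UINT32_MAX * buf.readUInt32BE(0) + buf.readUInt32BE(4);
    if (longUInt > Number.MAX_SAFE_INTEGER) {
        throw new Error('53-bit overflow');
    }
    return longUInt;
}

console.log(decodeLongUInt(Buffer.from([0, 0, 0, 1, 0, 0, 0, 0]))); // 4294967296
```
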
@ -200,9 +200,9 @@ function TTLVCodec() {
const fixedLength = 4; const fixedLength = 4;
if (fixedLength !== value.length) { if (fixedLength !== value.length) {
_throwError(logger, _throwError(logger,
'Length mismatch', 'Length mismatch',
{ funcName, fixedLength, { funcName, fixedLength,
bufferLength: value.length }); bufferLength: value.length });
} }
const enumValue = value.toString('hex'); const enumValue = value.toString('hex');
const actualTag = diversion ? TagEncoder[diversion].value : tag; const actualTag = diversion ? TagEncoder[diversion].value : tag;
@ -211,10 +211,10 @@ function TTLVCodec() {
!enumInfo.enumeration || !enumInfo.enumeration ||
!enumInfo.enumeration[enumValue]) { !enumInfo.enumeration[enumValue]) {
return { tag, return { tag,
value: enumValue, value: enumValue,
message: 'Unknown enumeration value', message: 'Unknown enumeration value',
diversion, diversion,
}; };
} }
return enumInfo.enumeration[enumValue]; return enumInfo.enumeration[enumValue];
}, },
@ -227,7 +227,7 @@ function TTLVCodec() {
const actualTag = diversion || tagName; const actualTag = diversion || tagName;
const encodedValue = const encodedValue =
Buffer.from(TagEncoder[actualTag].enumeration[value], Buffer.from(TagEncoder[actualTag].enumeration[value],
'hex'); 'hex');
return _ttlvPadVector([tag, type, length, encodedValue]); return _ttlvPadVector([tag, type, length, encodedValue]);
}, },
}, },
@ -238,9 +238,9 @@ function TTLVCodec() {
const fixedLength = 8; const fixedLength = 8;
if (fixedLength !== value.length) { if (fixedLength !== value.length) {
_throwError(logger, _throwError(logger,
'Length mismatch', 'Length mismatch',
{ funcName, fixedLength, { funcName, fixedLength,
bufferLength: value.length }); bufferLength: value.length });
} }
const msUInt = value.readUInt32BE(0); const msUInt = value.readUInt32BE(0);
const lsUInt = value.readUInt32BE(4); const lsUInt = value.readUInt32BE(4);
@ -267,7 +267,7 @@ function TTLVCodec() {
const length = Buffer.alloc(4); const length = Buffer.alloc(4);
length.writeUInt32BE(value.length); length.writeUInt32BE(value.length);
return _ttlvPadVector([tag, type, length, return _ttlvPadVector([tag, type, length,
Buffer.from(value, 'utf8')]); Buffer.from(value, 'utf8')]);
}, },
}, },
'08': { '08': {
@ -289,17 +289,17 @@ function TTLVCodec() {
const fixedLength = 8; const fixedLength = 8;
if (fixedLength !== value.length) { if (fixedLength !== value.length) {
_throwError(logger, _throwError(logger,
'Length mismatch', 'Length mismatch',
{ funcName, fixedLength, { funcName, fixedLength,
bufferLength: value.length }); bufferLength: value.length });
} }
const d = new Date(0); const d = new Date(0);
const utcSeconds = UINT32_MAX * value.readUInt32BE(0) + const utcSeconds = UINT32_MAX * value.readUInt32BE(0) +
value.readUInt32BE(4); value.readUInt32BE(4);
if (utcSeconds > Number.MAX_SAFE_INTEGER) { if (utcSeconds > Number.MAX_SAFE_INTEGER) {
_throwError(logger, _throwError(logger,
'53-bit overflow', '53-bit overflow',
{ funcName, utcSeconds }); { funcName, utcSeconds });
} }
d.setUTCSeconds(utcSeconds); d.setUTCSeconds(utcSeconds);
return d; return d;
@ -323,9 +323,9 @@ function TTLVCodec() {
const fixedLength = 4; const fixedLength = 4;
if (fixedLength !== value.length) { if (fixedLength !== value.length) {
_throwError(logger, _throwError(logger,
'Length mismatch', 'Length mismatch',
{ funcName, fixedLength, { funcName, fixedLength,
bufferLength: value.length }); bufferLength: value.length });
} }
return value.readInt32BE(0); return value.readInt32BE(0);
}, },
@ -415,8 +415,8 @@ function TTLVCodec() {
throw Error(`Unknown Type '${type}'`); throw Error(`Unknown Type '${type}'`);
} }
const itemValue = TypeEncoder[type].encode(key, const itemValue = TypeEncoder[type].encode(key,
item[key].value, item[key].value,
item[key].diversion); item[key].diversion);
result = result.concat(_ttlvPadVector(itemValue)); result = result.concat(_ttlvPadVector(itemValue));
}); });
}); });


@ -275,11 +275,11 @@ class KMIP {
KMIP.Structure('Request Header', [ KMIP.Structure('Request Header', [
KMIP.Structure('Protocol Version', [ KMIP.Structure('Protocol Version', [
KMIP.Integer('Protocol Version Major', KMIP.Integer('Protocol Version Major',
this.protocolVersion.major), this.protocolVersion.major),
KMIP.Integer('Protocol Version Minor', KMIP.Integer('Protocol Version Minor',
this.protocolVersion.minor)]), this.protocolVersion.minor)]),
KMIP.Integer('Maximum Response Size', KMIP.Integer('Maximum Response Size',
this.maximumResponseSize), this.maximumResponseSize),
KMIP.Integer('Batch Count', 1)]), KMIP.Integer('Batch Count', 1)]),
KMIP.Structure('Batch Item', [ KMIP.Structure('Batch Item', [
KMIP.Enumeration('Operation', operation), KMIP.Enumeration('Operation', operation),
@ -292,7 +292,7 @@ class KMIP {
(err, conversation, rawResponse) => { (err, conversation, rawResponse) => {
if (err) { if (err) {
logger.error('KMIP::request: Failed to send message', logger.error('KMIP::request: Failed to send message',
{ error: err }); { error: err });
return cb(err); return cb(err);
} }
const response = this._decodeMessage(logger, rawResponse); const response = this._decodeMessage(logger, rawResponse);
@ -311,16 +311,16 @@ class KMIP {
this.transport.abortPipeline(conversation); this.transport.abortPipeline(conversation);
const error = Error('Invalid batch item ID returned'); const error = Error('Invalid batch item ID returned');
logger.error('KMIP::request: failed', logger.error('KMIP::request: failed',
{ resultUniqueBatchItemID, uuid, error }); { resultUniqueBatchItemID, uuid, error });
return cb(error); return cb(error);
} }
if (performedOperation !== operation) { if (performedOperation !== operation) {
this.transport.abortPipeline(conversation); this.transport.abortPipeline(conversation);
const error = Error('Operation mismatch', const error = Error('Operation mismatch',
{ got: performedOperation, { got: performedOperation,
expected: operation }); expected: operation });
logger.error('KMIP::request: Operation mismatch', logger.error('KMIP::request: Operation mismatch',
{ error }); { error });
return cb(error); return cb(error);
} }
if (resultStatus !== 'Success') { if (resultStatus !== 'Success') {
@ -331,17 +331,19 @@ class KMIP {
response.lookup( response.lookup(
'Response Message/Batch Item/Result Message')[0]; 'Response Message/Batch Item/Result Message')[0];
const error = Error('KMIP request failure', const error = Error('KMIP request failure',
{ resultStatus, { resultStatus,
resultReason, resultReason,
resultMessage }); resultMessage });
logger.error('KMIP::request: request failed', logger.error('KMIP::request: request failed',
{ error, resultStatus, { error, resultStatus,
resultReason, resultMessage }); resultReason, resultMessage });
return cb(error); return cb(error);
} }
return cb(null, response); return cb(null, response);
}); });
} }
} }


@ -86,8 +86,8 @@ class TransportTemplate {
const deferedRequest = this.deferedRequests.shift(); const deferedRequest = this.deferedRequests.shift();
process.nextTick(() => { process.nextTick(() => {
this.send(logger, this.send(logger,
deferedRequest.encodedMessage, deferedRequest.encodedMessage,
deferedRequest.cb); deferedRequest.cb);
}); });
} else if (this.callbackPipeline.length === 0 && } else if (this.callbackPipeline.length === 0 &&
this.deferedRequests.length === 0 && this.deferedRequests.length === 0 &&


@ -1,76 +0,0 @@
const httpServer = require('../http/server');
const werelogs = require('werelogs');
const errors = require('../../errors');
const ZenkoMetrics = require('../../metrics/ZenkoMetrics');
const { sendSuccess, sendError } = require('./Utils');
function checkStub(log) { // eslint-disable-line
return true;
}
class HealthProbeServer extends httpServer {
constructor(params) {
const logging = new werelogs.Logger('HealthProbeServer');
super(params.port, logging);
this.logging = logging;
this.setBindAddress(params.bindAddress || 'localhost');
// hooking our request processing function by calling the
// parent's method for that
this.onRequest(this._onRequest);
this._reqHandlers = {
'/_/health/liveness': this._onLiveness.bind(this),
'/_/health/readiness': this._onReadiness.bind(this),
'/_/monitoring/metrics': this._onMetrics.bind(this),
};
this._livenessCheck = params.livenessCheck || checkStub;
this._readinessCheck = params.readinessCheck || checkStub;
}
onLiveCheck(f) {
this._livenessCheck = f;
}
onReadyCheck(f) {
this._readinessCheck = f;
}
_onRequest(req, res) {
const log = this.logging.newRequestLogger();
log.debug('request received', { method: req.method,
url: req.url });
if (req.method !== 'GET') {
sendError(res, log, errors.MethodNotAllowed);
} else if (req.url in this._reqHandlers) {
this._reqHandlers[req.url](req, res, log);
} else {
sendError(res, log, errors.InvalidURI);
}
}
_onLiveness(req, res, log) {
if (this._livenessCheck(log)) {
sendSuccess(res, log);
} else {
sendError(res, log, errors.ServiceUnavailable);
}
}
_onReadiness(req, res, log) {
if (this._readinessCheck(log)) {
sendSuccess(res, log);
} else {
sendError(res, log, errors.ServiceUnavailable);
}
}
// expose metrics to Prometheus
_onMetrics(req, res) {
res.writeHead(200, {
'Content-Type': ZenkoMetrics.asPrometheusContentType(),
});
res.end(ZenkoMetrics.asPrometheus());
}
}
module.exports = HealthProbeServer;
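
A hypothetical wiring of the deleted server, using its own constructor parameters and check hooks; the port and the check body are made up:

```javascript
// Assumes HealthProbeServer is still importable; werelogs configuration
// is omitted, and start() is assumed to come from the httpServer parent.
const probeServer = new HealthProbeServer({ port: 8000 });
probeServer.onReadyCheck(log => {
    log.debug('running readiness check');
    return true; // report ready; returning false yields a 503
});
probeServer.start();
```
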


@ -3,17 +3,19 @@ const werelogs = require('werelogs');
const errors = require('../../errors'); const errors = require('../../errors');
const DEFAULT_LIVE_ROUTE = '/_/live'; const DEFAULT_LIVE_ROUTE = '/_/live';
const DEFAULT_READY_ROUTE = '/_/ready'; const DEFAULT_READY_ROUTE = '/_/live';
const DEFAULT_METRICS_ROUTE = '/metrics'; const DEFAULT_METRICS_ROUTE = '/_/metrics';
/** /**
* ProbeDelegate is used to handle probe checks. * ProbeDelegate is used to determine if a probe is successful or
* You can sendSuccess and sendError from Utils to handle success * if any errors are present.
* and failure conditions. * If everything is working as intended, it is a no-op.
* Otherwise, return a string representing what is failing.
* @callback ProbeDelegate * @callback ProbeDelegate
* @param { import('http').ServerResponse } res - HTTP response for writing * @param { import('http').ServerResponse } res - HTTP response for writing
* @param {werelogs.Logger} log - Werelogs instance for logging if you choose to * @param {werelogs.Logger} log - Werelogs instance for logging if you choose to
* @return {undefined} * @return {(string|undefined)} String representing issues to report. An empty
* string or undefined is used to represent no issues.
*/ */
/** /**
@ -89,7 +91,13 @@ class ProbeServer extends httpServer {
return; return;
} }
this._handlers.get(req.url)(res, log); const probeResponse = this._handlers.get(req.url)(res, log);
if (probeResponse !== undefined && probeResponse !== '') {
// Return an internal error with the response
errors.InternalError
.customizeDescription(probeResponse)
.writeResponse(res);
}
} }
} }
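
Under the new contract on the right-hand side, a handler writes its own success response and returns a string only to report a failure. A sketch of a conforming delegate, with the actual health test left as an assumption:

```javascript
// Hedged sketch of a ProbeDelegate; `backendReachable` is a made-up
// stand-in for a real health test.
function readinessProbe(res, log) {
    const backendReachable = true;
    if (!backendReachable) {
        // a non-empty return string makes ProbeServer reply with an
        // InternalError carrying this description
        return 'backend not reachable';
    }
    log.debug('readiness probe ok');
    res.writeHead(200);
    res.end('OK');
    return undefined;
}
```
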


@ -1,41 +0,0 @@
/**
* Send a successful HTTP response of 200 OK
* @param {http.ServerResponse} res - HTTP response for writing
* @param {werelogs.Logger} log - Werelogs instance for logging if you choose to
* @param {string} [message] - Message to send as response, defaults to OK
* @returns {undefined}
*/
function sendSuccess(res, log, message = 'OK') {
log.debug('replying with success');
res.writeHead(200);
res.end(message);
}
/**
* Send an Arsenal Error response
* @param {http.ServerResponse} res - HTTP response for writing
* @param {werelogs.Logger} log - Werelogs instance for logging if you choose to
* @param {ArsenalError} error - Error to send back to the user
* @param {string} [optMessage] - Message to use instead of the errors message
* @returns {undefined}
*/
function sendError(res, log, error, optMessage) {
const message = optMessage || error.description || '';
log.debug('sending back error response',
{
httpCode: error.code,
errorType: error.message,
error: message,
},
);
res.writeHead(error.code);
res.end(JSON.stringify({
errorType: error.message,
errorMessage: message,
}));
}
module.exports = {
sendSuccess,
sendError,
};
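
A hypothetical handler using the two deleted helpers together; the require paths are assumptions about the pre-revert layout:

```javascript
// Assumes the deleted Utils module is still on disk at its old path.
const { sendSuccess, sendError } = require('./Utils');
const errors = require('arsenal').errors;

// Reject non-GET requests with an Arsenal error, otherwise reply 200 OK.
function onRequest(req, res, log) {
    if (req.method !== 'GET') {
        return sendError(res, log, errors.MethodNotAllowed);
    }
    return sendSuccess(res, log);
}
```
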


@ -81,7 +81,6 @@ class RESTClient {
this.host = params.host; this.host = params.host;
this.port = params.port; this.port = params.port;
this.isPassthrough = params.isPassthrough || false;
this.setupLogging(params.logApi); this.setupLogging(params.logApi);
this.httpAgent = new HttpAgent({ this.httpAgent = new HttpAgent({
keepAlive: true, keepAlive: true,
@ -120,13 +119,11 @@ class RESTClient {
doRequest(method, headers, key, log, responseCb) { doRequest(method, headers, key, log, responseCb) {
const reqHeaders = headers || {}; const reqHeaders = headers || {};
const urlKey = key || ''; const urlKey = key || '';
const prefix = this.isPassthrough ?
constants.passthroughFileURL : constants.dataFileURL;
const reqParams = { const reqParams = {
hostname: this.host, hostname: this.host,
port: this.port, port: this.port,
method, method,
path: encodeURI(`${prefix}/${urlKey}`), path: `${constants.dataFileURL}/${urlKey}`,
headers: reqHeaders, headers: reqHeaders,
agent: this.httpAgent, agent: this.httpAgent,
}; };
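
The removed lines selected the URL prefix per client mode. A runnable sketch of the behavior being reverted; the constant values here are illustrative stand-ins, not necessarily Arsenal's real ones:

```javascript
// Illustrative stand-ins for Arsenal's constants module:
const constants = {
    dataFileURL: '/DataFile',
    passthroughFileURL: '/PassthroughFile',
};

function buildPath(isPassthrough, urlKey) {
    const prefix = isPassthrough
        ? constants.passthroughFileURL
        : constants.dataFileURL;
    return encodeURI(`${prefix}/${urlKey}`);
}

console.log(buildPath(false, 'abc123')); // '/DataFile/abc123'
```
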


@ -7,7 +7,7 @@ const werelogs = require('werelogs');
const httpServer = require('../http/server'); const httpServer = require('../http/server');
const constants = require('../../constants'); const constants = require('../../constants');
const { parseURL } = require('./utils'); const utils = require('./utils');
const httpUtils = require('../http/utils'); const httpUtils = require('../http/utils');
const errors = require('../../errors'); const errors = require('../../errors');
@ -19,7 +19,7 @@ function setContentRange(response, byteRange, objectSize) {
const [start, end] = byteRange; const [start, end] = byteRange;
assert(start !== undefined && end !== undefined); assert(start !== undefined && end !== undefined);
response.setHeader('Content-Range', response.setHeader('Content-Range',
`bytes ${start}-${end}/${objectSize}`); `bytes ${start}-${end}/${objectSize}`);
} }
function sendError(res, log, error, optMessage) { function sendError(res, log, error, optMessage) {
@ -37,6 +37,42 @@ function sendError(res, log, error, optMessage) {
errorMessage: message })}\n`); errorMessage: message })}\n`);
} }
/**
* Parse the given url and return a pathInfo object. Sanity checks are
* performed.
*
* @param {String} urlStr - URL to parse
* @param {Boolean} expectKey - whether the command expects to see a
* key in the URL
* @return {Object} a pathInfo object with URL items containing the
* following attributes:
* - pathInfo.service {String} - The name of REST service ("DataFile")
* - pathInfo.key {String} - The requested key
*/
function parseURL(urlStr, expectKey) {
const urlObj = url.parse(urlStr);
const pathInfo = utils.explodePath(urlObj.path);
if (pathInfo.service !== constants.dataFileURL) {
throw errors.InvalidAction.customizeDescription(
`unsupported service '${pathInfo.service}'`);
}
if (expectKey && pathInfo.key === undefined) {
throw errors.MissingParameter.customizeDescription(
'URL is missing key');
}
if (!expectKey && pathInfo.key !== undefined) {
// note: we may implement rewrite functionality by allowing a
// key in the URL, though we may still provide the new key in
// the Location header to keep immutability property and
// atomicity of the update (we would just remove the old
// object when the new one has been written entirely in this
// case, saving a request over an equivalent PUT + DELETE).
throw errors.InvalidURI.customizeDescription(
'PUT url cannot contain a key');
}
return pathInfo;
}
/** /**
* @class * @class
* @classdesc REST Server interface * @classdesc REST Server interface
@ -45,6 +81,7 @@ function sendError(res, log, error, optMessage) {
* start() to start listening to the configured port. * start() to start listening to the configured port.
*/ */
class RESTServer extends httpServer { class RESTServer extends httpServer {
/** /**
* @constructor * @constructor
* @param {Object} params - constructor params * @param {Object} params - constructor params
@ -226,7 +263,7 @@ class RESTServer extends httpServer {
return sendError(res, log, err); return sendError(res, log, err);
} }
log.debug('sending back 200/206 response with contents', log.debug('sending back 200/206 response with contents',
{ key: pathInfo.key }); { key: pathInfo.key });
setContentLength(res, contentLength); setContentLength(res, contentLength);
res.setHeader('Accept-Ranges', 'bytes'); res.setHeader('Accept-Ranges', 'bytes');
if (byteRange) { if (byteRange) {
@ -264,7 +301,7 @@ class RESTServer extends httpServer {
return sendError(res, log, err); return sendError(res, log, err);
} }
log.debug('sending back 204 response to DELETE', log.debug('sending back 204 response to DELETE',
{ key: pathInfo.key }); { key: pathInfo.key });
res.writeHead(204); res.writeHead(204);
return res.end(() => { return res.end(() => {
log.debug('DELETE response sent', { key: pathInfo.key }); log.debug('DELETE response sent', { key: pathInfo.key });


@ -1,68 +1,15 @@
'use strict'; // eslint-disable-line 'use strict'; // eslint-disable-line
const errors = require('../../errors'); const errors = require('../../errors');
const constants = require('../../constants');
const url = require('url');
const passthroughPrefixLength = constants.passthroughFileURL.length; module.exports.explodePath = function explodePath(path) {
function explodePath(path) {
if (path.startsWith(constants.passthroughFileURL)) {
const key = path.slice(passthroughPrefixLength + 1);
return {
service: constants.passthroughFileURL,
key: key.length > 0 ? key : undefined,
};
}
const pathMatch = /^(\/[a-zA-Z0-9]+)(\/([0-9a-f]*))?$/.exec(path); const pathMatch = /^(\/[a-zA-Z0-9]+)(\/([0-9a-f]*))?$/.exec(path);
if (pathMatch) { if (pathMatch) {
return { return {
service: pathMatch[1], service: pathMatch[1],
key: (pathMatch[3] !== undefined && pathMatch[3].length > 0 ? key: (pathMatch[3] !== undefined && pathMatch[3].length > 0 ?
pathMatch[3] : undefined), pathMatch[3] : undefined),
}; };
} }
throw errors.InvalidURI.customizeDescription('malformed URI'); throw errors.InvalidURI.customizeDescription('malformed URI');
}
/**
* Parse the given url and return a pathInfo object. Sanity checks are
* performed.
*
* @param {String} urlStr - URL to parse
* @param {Boolean} expectKey - whether the command expects to see a
* key in the URL
* @return {Object} a pathInfo object with URL items containing the
* following attributes:
* - pathInfo.service {String} - The name of REST service ("DataFile")
* - pathInfo.key {String} - The requested key
*/
function parseURL(urlStr, expectKey) {
const urlObj = url.parse(urlStr);
const pathInfo = explodePath(decodeURI(urlObj.path));
if ((pathInfo.service !== constants.dataFileURL)
&& (pathInfo.service !== constants.passthroughFileURL)) {
throw errors.InvalidAction.customizeDescription(
`unsupported service '${pathInfo.service}'`);
}
if (expectKey && pathInfo.key === undefined) {
throw errors.MissingParameter.customizeDescription(
'URL is missing key');
}
if (!expectKey && pathInfo.key !== undefined) {
// note: we may implement rewrite functionality by allowing a
// key in the URL, though we may still provide the new key in
// the Location header to keep immutability property and
// atomicity of the update (we would just remove the old
// object when the new one has been written entirely in this
// case, saving a request over an equivalent PUT + DELETE).
throw errors.InvalidURI.customizeDescription(
'PUT url cannot contain a key');
}
return pathInfo;
}
module.exports = {
explodePath,
parseURL,
}; };
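
A standalone sketch of the path parsing kept on the right-hand side: a service segment followed by an optional lowercase-hex key, anything else rejected:

```javascript
// Same regex as the code above; throws on malformed paths.
function explodePath(path) {
    const pathMatch = /^(\/[a-zA-Z0-9]+)(\/([0-9a-f]*))?$/.exec(path);
    if (!pathMatch) {
        throw new Error('malformed URI');
    }
    return {
        service: pathMatch[1],
        key: (pathMatch[3] !== undefined && pathMatch[3].length > 0 ?
            pathMatch[3] : undefined),
    };
}

console.log(explodePath('/DataFile/0123abcd'));
// { service: '/DataFile', key: '0123abcd' }
console.log(explodePath('/DataFile'));
// { service: '/DataFile', key: undefined }
```
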


@ -17,6 +17,7 @@ const rpc = require('./rpc.js');
 * RPC client object accessing the sub-level transparently.
 */
class LevelDbClient extends rpc.BaseClient {
    /**
     * @constructor
     *
@ -77,6 +78,7 @@ class LevelDbClient extends rpc.BaseClient {
 * env.subDb (env is passed as first parameter of received RPC calls).
 */
class LevelDbService extends rpc.BaseService {
    /**
     * @constructor
     *

View File

@ -37,6 +37,7 @@ let streamRPCJSONObj;
 * an error occurred).
 */
class BaseClient extends EventEmitter {
    /**
     * @constructor
     *
@ -53,7 +54,7 @@ class BaseClient extends EventEmitter {
     */
    constructor(params) {
        const { url, logger, callTimeoutMs,
            streamMaxPendingAck, streamAckTimeoutMs } = params;
        assert(url);
        assert(logger);
@ -81,11 +82,11 @@ class BaseClient extends EventEmitter {
    _call(remoteCall, args, cb) {
        const wrapCb = (err, data) => {
            cb(reconstructError(err),
                this.socketStreams.decodeStreams(data));
        };
        this.logger.debug('remote call', { remoteCall, args });
        this.socket.emit('call', remoteCall,
            this.socketStreams.encodeStreams(args), wrapCb);
        return undefined;
    }
@ -112,8 +113,8 @@ class BaseClient extends EventEmitter {
            throw new Error(`argument cb=${cb} is not a callback`);
        }
        async.timeout(this._call.bind(this), timeoutMs,
            `operation ${remoteCall} timed out`)(remoteCall,
            args, cb);
        return undefined;
    }
@ -141,7 +142,7 @@ class BaseClient extends EventEmitter {
        const url = this.url;
        this.socket.on('error', err => {
            this.logger.warn('connectivity error to the RPC service',
                { url, error: err });
        });
        this.socket.on('connect', () => {
            this.emit('connect');
@ -155,7 +156,7 @@ class BaseClient extends EventEmitter {
        this.getManifest((err, manifest) => {
            if (err) {
                this.logger.error('Error fetching manifest from RPC server',
                    { error: err });
            } else {
                manifest.api.forEach(apiItem => {
                    this.createCall(apiItem.name);
@ -250,6 +251,7 @@ class BaseClient extends EventEmitter {
 *
 */
class BaseService {
    /**
     * @constructor
     *
@ -495,7 +497,7 @@ function RPCServer(params) {
        conn.on('error', err => {
            log.error('error on socket.io connection',
                { namespace: service.namespace, error: err });
        });
        conn.on('call', (remoteCall, args, cb) => {
            const decodedArgs = streamsSocket.decodeStreams(args);
@ -645,8 +647,8 @@ streamRPCJSONObj = function _streamRPCJSONObj(obj, wstream, cb) {
    // primitive types
    if (obj === undefined) {
        wstream.write('null'); // if undefined elements are present in
                               // arrays, convert them to JSON null
                               // objects
    } else {
        wstream.write(JSON.stringify(obj));
    }

View File

@ -16,7 +16,7 @@ class SIOOutputStream extends stream.Writable {
    constructor(socket, streamId, maxPendingAck, ackTimeoutMs) {
        super({ objectMode: true });
        this._initOutputStream(socket, streamId, maxPendingAck,
            ackTimeoutMs);
    }
    _initOutputStream(socket, streamId, maxPendingAck, ackTimeoutMs) {
@ -194,7 +194,7 @@ class SIOStreamSocket {
        this.socket.on('stream-data', (payload, cb) => {
            const { streamId, data } = payload;
            log.debug('received \'stream-data\' event',
                { streamId, size: data.length });
            const stream = this.remoteStreams[streamId];
            if (!stream) {
                log.debug('no such remote stream registered', { streamId });
@ -280,15 +280,15 @@ class SIOStreamSocket {
        let transportStream;
        if (isReadStream) {
            transportStream = new SIOOutputStream(this, streamId,
                this.maxPendingAck,
                this.ackTimeoutMs);
        } else {
            transportStream = new SIOInputStream(this, streamId);
        }
        this.localStreams[streamId] = arg;
        arg.once('close', () => {
            log.debug('stream closed, removing from local streams',
                { streamId });
            delete this.localStreams[streamId];
        });
        arg.on('error', error => {
@ -350,8 +350,8 @@ class SIOStreamSocket {
            stream = new SIOInputStream(this, streamId);
        } else if (arg.writable) {
            stream = new SIOOutputStream(this, streamId,
                this.maxPendingAck,
                this.ackTimeoutMs);
        } else {
            throw new Error('can\'t decode stream neither readable ' +
                'nor writable');
@ -360,14 +360,14 @@ class SIOStreamSocket {
        if (arg.readable) {
            stream.once('close', () => {
                log.debug('stream closed, removing from remote streams',
                    { streamId });
                delete this.remoteStreams[streamId];
            });
        }
        if (arg.writable) {
            stream.once('finish', () => {
                log.debug('stream finished, removing from remote streams',
                    { streamId });
                delete this.remoteStreams[streamId];
            });
        }
@ -399,7 +399,7 @@ class SIOStreamSocket {
    _write(streamId, data, cb) {
        this.logger.debug('emit \'stream-data\' event',
            { streamId, size: data.length });
        this.socket.emit('stream-data', { streamId, data }, cb);
    }

View File

@ -1,159 +0,0 @@
'use strict'; // eslint-disable-line strict

const { URL } = require('url');

const { decryptSecret } = require('../executables/pensieveCreds/utils');

function patchLocations(overlayLocations, creds, log) {
    if (!overlayLocations) {
        return {};
    }
    const locations = {};
    Object.keys(overlayLocations).forEach(k => {
        const l = overlayLocations[k];
        const location = {
            name: k,
            objectId: l.objectId,
            details: l.details || {},
            locationType: l.locationType,
        };
        let supportsVersioning = false;
        let pathStyle = process.env.CI_CEPH !== undefined;

        switch (l.locationType) {
        case 'location-mem-v1':
            location.type = 'mem';
            location.details = { supportsVersioning: true };
            break;
        case 'location-file-v1':
            location.type = 'file';
            location.details = { supportsVersioning: true };
            break;
        case 'location-azure-v1':
            location.type = 'azure';
            if (l.details.secretKey && l.details.secretKey.length > 0) {
                location.details = {
                    bucketMatch: l.details.bucketMatch,
                    azureStorageEndpoint: l.details.endpoint,
                    azureStorageAccountName: l.details.accessKey,
                    azureStorageAccessKey: decryptSecret(creds,
                        l.details.secretKey),
                    azureContainerName: l.details.bucketName,
                };
            }
            break;
        case 'location-ceph-radosgw-s3-v1':
        case 'location-scality-ring-s3-v1':
            pathStyle = true; // fallthrough
        case 'location-aws-s3-v1':
        case 'location-wasabi-v1':
            supportsVersioning = true; // fallthrough
        case 'location-do-spaces-v1':
            location.type = 'aws_s3';
            if (l.details.secretKey && l.details.secretKey.length > 0) {
                let https = true;
                let awsEndpoint = l.details.endpoint ||
                    's3.amazonaws.com';
                if (awsEndpoint.includes('://')) {
                    const url = new URL(awsEndpoint);
                    awsEndpoint = url.host;
                    https = url.protocol.includes('https');
                }
                location.details = {
                    credentials: {
                        accessKey: l.details.accessKey,
                        secretKey: decryptSecret(creds,
                            l.details.secretKey),
                    },
                    bucketName: l.details.bucketName,
                    bucketMatch: l.details.bucketMatch,
                    serverSideEncryption:
                        Boolean(l.details.serverSideEncryption),
                    region: l.details.region,
                    awsEndpoint,
                    supportsVersioning,
                    pathStyle,
                    https,
                };
            }
            break;
        case 'location-gcp-v1':
            location.type = 'gcp';
            if (l.details.secretKey && l.details.secretKey.length > 0) {
                location.details = {
                    credentials: {
                        accessKey: l.details.accessKey,
                        secretKey: decryptSecret(creds,
                            l.details.secretKey),
                    },
                    bucketName: l.details.bucketName,
                    mpuBucketName: l.details.mpuBucketName,
                    bucketMatch: l.details.bucketMatch,
                    gcpEndpoint: l.details.endpoint ||
                        'storage.googleapis.com',
                    https: true,
                };
            }
            break;
        case 'location-scality-sproxyd-v1':
            location.type = 'scality';
            if (l.details && l.details.bootstrapList &&
                l.details.proxyPath) {
                location.details = {
                    supportsVersioning: true,
                    connector: {
                        sproxyd: {
                            chordCos: l.details.chordCos || null,
                            bootstrap: l.details.bootstrapList,
                            path: l.details.proxyPath,
                        },
                    },
                };
            }
            break;
        case 'location-nfs-mount-v1':
            location.type = 'pfs';
            if (l.details) {
                location.details = {
                    supportsVersioning: true,
                    bucketMatch: true,
                    pfsDaemonEndpoint: {
                        host: `${l.name}-cosmos-pfsd`,
                        port: 80,
                    },
                };
            }
            break;
        case 'location-scality-hdclient-v2':
            location.type = 'scality';
            if (l.details && l.details.bootstrapList) {
                location.details = {
                    supportsVersioning: true,
                    connector: {
                        hdclient: {
                            bootstrap: l.details.bootstrapList,
                        },
                    },
                };
            }
            break;
        default:
            log.info(
                'unknown location type',
                { locationType: l.locationType },
            );
            return;
        }
        location.sizeLimitGB = l.sizeLimitGB || null;
        location.isTransient = Boolean(l.isTransient);
        location.legacyAwsBehavior = Boolean(l.legacyAwsBehavior);
        locations[location.name] = location;
        return;
    });
    return locations;
}

module.exports = {
    patchLocations,
};
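A quick sketch of how this (now removed) helper was consumed. The overlay entry and logger below are illustrative stand-ins, not actual Orbit/Pensieve payloads:

```javascript
// Hypothetical overlay entry; only the fields read by patchLocations are set.
const log = { info: () => {} }; // minimal logger stub
const overlay = {
    'us-east-1': {
        objectId: '9d8f6508-e325-4f1d-8d43-c3e88b8e4dd5', // made-up id
        locationType: 'location-file-v1',
    },
};
const locations = patchLocations(overlay, /* creds */ {}, log);
// locations['us-east-1'] ->
// { name: 'us-east-1', objectId: '...', type: 'file',
//   details: { supportsVersioning: true }, locationType: 'location-file-v1',
//   sizeLimitGB: null, isTransient: false, legacyAwsBehavior: false }
```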

View File

@ -38,10 +38,6 @@
"type": "string", "type": "string",
"pattern": "^arn:aws:iam::[0-9]{12}:saml-provider/[\\w._-]{1,128}$" "pattern": "^arn:aws:iam::[0-9]{12}:saml-provider/[\\w._-]{1,128}$"
}, },
"principalFederatedOidcIdp": {
"type": "string",
"pattern": "^(?:http(s)?:\/\/)?[\\w.-]+(?:\\.[\\w\\.-]+)+[\\w\\-\\._~:/?#[\\]@!\\$&'\\(\\)\\*\\+,;=.]+$"
},
"principalAWSItem": { "principalAWSItem": {
"type": "object", "type": "object",
"properties": { "properties": {
@ -102,9 +98,6 @@
"oneOf": [ "oneOf": [
{ {
"$ref": "#/definitions/principalFederatedSamlIdp" "$ref": "#/definitions/principalFederatedSamlIdp"
},
{
"$ref": "#/definitions/principalFederatedOidcIdp"
} }
] ]
} }

View File

@ -50,7 +50,7 @@ evaluators.isResourceApplicable = (requestContext, statementResource, log) => {
            requestResourceArr, true);
        if (arnSegmentsMatch) {
            log.trace('policy resource is applicable to request',
                { requestResource: resource, policyResource });
            return true;
        }
        continue;
@ -224,21 +224,21 @@ evaluators.evaluatePolicy = (requestContext, policy, log) => {
        // in policy, move on to next statement
        if (currentStatement.NotResource &&
            evaluators.isResourceApplicable(requestContext,
                currentStatement.NotResource, log)) {
            continue;
        }
        // If affirmative action is in policy and request action is not
        // applicable, move on to next statement
        if (currentStatement.Action &&
            !evaluators.isActionApplicable(requestContext.getAction(),
                currentStatement.Action, log)) {
            continue;
        }
        // If NotAction is in policy and action matches NotAction in policy,
        // move on to next statement
        if (currentStatement.NotAction &&
            evaluators.isActionApplicable(requestContext.getAction(),
                currentStatement.NotAction, log)) {
            continue;
        }
        const conditionEval = currentStatement.Condition ?
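The view cuts off at the condition evaluation, but the skip logic above can be made concrete with a hedged illustration (the statement shape is hypothetical):

```javascript
// Hypothetical statement: its NotAction matches the request's action, so the
// loop above skips it and moves on to the next statement.
const currentStatement = {
    Effect: 'Allow',
    NotAction: 's3:GetObject',
    Resource: 'arn:aws:s3:::my_corporate_bucket/*',
};
// If requestContext.getAction() === 's3:GetObject', then
// isActionApplicable('s3:GetObject', currentStatement.NotAction, log)
// returns true and the `continue` branch shown above fires.
```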

View File

@ -30,7 +30,6 @@ const sharedActionMap = {
    bypassGovernanceRetention: 's3:BypassGovernanceRetention',
    listMultipartUploads: 's3:ListBucketMultipartUploads',
    listParts: 's3:ListMultipartUploadParts',
-    metadataSearch: 's3:MetadataSearch',
    multipartDelete: 's3:AbortMultipartUpload',
    objectDelete: 's3:DeleteObject',
    objectDeleteTagging: 's3:DeleteObjectTagging',
@ -117,7 +116,6 @@ const actionMonitoringMapS3 = {
    initiateMultipartUpload: 'CreateMultipartUpload',
    listMultipartUploads: 'ListMultipartUploads',
    listParts: 'ListParts',
-    metadataSearch: 'MetadataSearch',
    multiObjectDelete: 'DeleteObjects',
    multipartDelete: 'AbortMultipartUpload',
    objectCopy: 'CopyObject',
@ -161,7 +159,6 @@ const actionMapIAM = {
    getPolicyVersion: 'iam:GetPolicyVersion',
    getUser: 'iam:GetUser',
    listAccessKeys: 'iam:ListAccessKeys',
-    listEntitiesForPolicy: 'iam:ListEntitiesForPolicy',
    listGroupPolicies: 'iam:ListGroupPolicies',
    listGroups: 'iam:ListGroups',
    listGroupsForUser: 'iam:ListGroupsForUser',

View File

@ -39,11 +39,11 @@ conditions.findConditionKey = (key, requestContext) => {
    // (see Boolean Condition Operators).
    // Note: This key is only present if MFA was used. So, the following
    // will not work:
    //     "Condition" :
    //         { "Bool" : { "aws:MultiFactorAuthPresent" : false } }
    // Instead use:
    //     "Condition" :
    //         { "Null" : { "aws:MultiFactorAuthPresent" : true } }
    map.set('aws:MultiFactorAuthPresent',
        requestContext.getMultiFactorAuthPresent());
    // aws:MultiFactorAuthAge Used to check how many seconds since
@ -146,8 +146,6 @@ conditions.findConditionKey = (key, requestContext) => {
    map.set('s3:ObjLocationConstraint',
        headers['x-amz-meta-scal-location-constraint']);
    map.set('sts:ExternalId', requestContext.getRequesterExternalId());
-    map.set('keycloak:groups', requesterInfo.keycloakGroup);
-    map.set('keycloak:roles', requesterInfo.keycloakRole);
    map.set('iam:PolicyArn', requestContext.getPolicyArn());
    // s3:ExistingObjectTag - Used to check that existing object tag has
    // specific tag key and value. Extraction of correct tag key is done in CloudServer.
@ -166,8 +164,8 @@ conditions.findConditionKey = (key, requestContext) => {
    // so evaluation should be skipped
    map.set('s3:RequestObjectTagKeys',
        requestContext.getNeedTagEval() && requestContext.getRequestObjTags()
            ? getTagKeys(requestContext.getRequestObjTags())
            : undefined);
    return map.get(key);
};
@ -191,7 +189,7 @@ function convertSpecialChars(string) {
        return map[char];
    }
    return string.replace(/(\$\{\*\})|(\$\{\?\})|(\$\{\$\})/g,
        characterMap);
}
/**
@ -425,10 +423,10 @@ conditions.convertConditionOperator = operator => {
        return !operatorMap.ArnLike(key, value);
    },
    Null: function nullOperator(key, value) {
        // Null is used to check if a condition key is present.
        // The policy statement value should be either true (the key doesn't
        // exist — it is null) or false (the key exists and its value is
        // not null).
        if ((key === undefined || key === null)
            && value[0] === 'true' ||
            (key !== undefined && key !== null)
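The tail of `nullOperator` is cut off in this view. Assuming the second clause completes as `&& value[0] === 'false'` and the whole expression is returned, the operator's behavior can be sketched as:

```javascript
// Presumed completion of the truncated nullOperator above; the ending
// `&& value[0] === 'false'` is an assumption, not shown in the diff.
const nullOperator = (key, value) =>
    ((key === undefined || key === null) && value[0] === 'true') ||
    ((key !== undefined && key !== null) && value[0] === 'false');

console.log(nullOperator(undefined, ['true']));    // true: key absent, as the policy expects
console.log(nullOperator('mfa-token', ['false'])); // true: key present, as the policy expects
console.log(nullOperator(undefined, ['false']));   // false
```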

View File

@ -51,10 +51,10 @@ wildcards.handleWildcardInResource = arn => {
    // Wildcards can be part of the resource ARN.
    // Wildcards do NOT span segments of the ARN (separated by ":")
    // Example: all elements in specific bucket:
    //     "Resource": "arn:aws:s3:::my_corporate_bucket/*"
    // ARN format:
    //     arn:partition:service:region:namespace:relative-id
    const arnArr = arn.split(':');
    return arnArr.map(portion => wildcards.handleWildcards(portion));
};
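A one-liner showing why the split happens before wildcard expansion (values taken from the comment above):

```javascript
console.log('arn:aws:s3:::my_corporate_bucket/*'.split(':'));
// [ 'arn', 'aws', 's3', '', '', 'my_corporate_bucket/*' ]
// Each portion is expanded separately, so '*' cannot match across ':'.
```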

View File

@ -6,6 +6,7 @@ const crypto = require('crypto');
 * data through a stream
 */
class MD5Sum extends Transform {
    /**
     * @constructor
     */
@ -39,6 +40,7 @@ class MD5Sum extends Transform {
        this.emit('hashed');
        callback(null);
    }
}
module.exports = MD5Sum;
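Typical consumption, sketched under the assumption that the transform stores the hex digest on `completedHash` before emitting `'hashed'` (that property is not visible in this diff):

```javascript
// Usage sketch; the require path is illustrative.
const fs = require('fs');
const MD5Sum = require('./MD5Sum');

const hasher = new MD5Sum();
hasher.on('hashed', () => {
    // assumes _flush() set this.completedHash to the hex digest
    console.log('md5:', hasher.completedHash);
});
// pipe data through, and resume() so the pass-through side drains
fs.createReadStream('./some-file').pipe(hasher).resume();
```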

View File

@ -73,7 +73,7 @@ class ResultsCollector extends EventEmitter {
 * @property {Error} [results[].error] - error returned by Azure putting subpart
 * @property {number} results[].subPartIndex - index of the subpart
 */
/**
 * "error" event
 * @event ResultCollector#error
 * @type {(Error|undefined)} error - error returned by Azure last subpart

View File

@ -94,7 +94,7 @@ azureMpuUtils.getSubPartIds = (part, uploadId) =>
    azureMpuUtils.getBlockId(uploadId, part.partNumber, subPartIndex));

azureMpuUtils.putSinglePart = (errorWrapperFn, request, params, dataStoreName,
    log, cb) => {
    const { bucketName, partNumber, size, objectKey, contentMD5, uploadId }
        = params;
    const blockId = azureMpuUtils.getBlockId(uploadId, partNumber, 0);
@ -107,31 +107,31 @@ azureMpuUtils.putSinglePart = (errorWrapperFn, request, params, dataStoreName,
    request.pipe(passThrough);
    return errorWrapperFn('uploadPart', 'createBlockFromStream',
        [blockId, bucketName, objectKey, passThrough, size, options,
        (err, result) => {
            if (err) {
                log.error('Error from Azure data backend uploadPart',
                    { error: err.message, dataStoreName });
                if (err.code === 'ContainerNotFound') {
                    return cb(errors.NoSuchBucket);
                }
-                if (err.code === 'InvalidMd5') {
-                    return cb(errors.InvalidDigest);
-                }
-                if (err.code === 'Md5Mismatch') {
-                    return cb(errors.BadDigest);
-                }
-                return cb(errors.InternalError.customizeDescription(
-                    `Error returned from Azure: ${err.message}`),
-                );
-            }
-            const md5 = result.headers['content-md5'] || '';
-            const eTag = objectUtils.getHexMD5(md5);
-            return cb(null, eTag, size);
-        }], log, cb);
+                if (err.code === 'InvalidMd5') {
+                    return cb(errors.InvalidDigest);
+                }
+                if (err.code === 'Md5Mismatch') {
+                    return cb(errors.BadDigest);
+                }
+                return cb(errors.InternalError.customizeDescription(
+                    `Error returned from Azure: ${err.message}`)
+                );
+            }
+            const md5 = result.headers['content-md5'] || '';
+            const eTag = objectUtils.getHexMD5(md5);
+            return cb(null, eTag, size);
+        }], log, cb);
};

azureMpuUtils.putNextSubPart = (errorWrapperFn, partParams, subPartInfo,
    subPartStream, subPartIndex, resultsCollector, log, cb) => {
    const { uploadId, partNumber, bucketName, objectKey } = partParams;
    const subPartSize = azureMpuUtils.getSubPartSize(
        subPartInfo, subPartIndex);
@ -140,11 +140,11 @@ azureMpuUtils.putNextSubPart = (errorWrapperFn, partParams, subPartInfo,
    resultsCollector.pushOp();
    errorWrapperFn('uploadPart', 'createBlockFromStream',
        [subPartId, bucketName, objectKey, subPartStream, subPartSize,
        {}, err => resultsCollector.pushResult(err, subPartIndex)], log, cb);
};

azureMpuUtils.putSubParts = (errorWrapperFn, request, params,
    dataStoreName, log, cb) => {
    const subPartInfo = azureMpuUtils.getSubPartInfo(params.size);
    const resultsCollector = new ResultsCollector();
    const hashedStream = new MD5Sum();

View File

@ -31,9 +31,9 @@ convertMethods.listMultipartUploads = xmlParams => {
    const l = xmlParams.list;
    xml.push('<?xml version="1.0" encoding="UTF-8"?>',
        '<ListMultipartUploadsResult ' +
            'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">',
-        `<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>`,
+        `<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>`
    );
    // For certain XML elements, if it is `undefined`, AWS returns either an
@ -58,7 +58,7 @@ convertMethods.listMultipartUploads = xmlParams => {
    });
    xml.push(`<MaxUploads>${escapeForXml(l.MaxKeys)}</MaxUploads>`,
-        `<IsTruncated>${escapeForXml(l.IsTruncated)}</IsTruncated>`,
+        `<IsTruncated>${escapeForXml(l.IsTruncated)}</IsTruncated>`
    );
    l.Uploads.forEach(upload => {
@ -69,29 +69,29 @@ convertMethods.listMultipartUploads = xmlParams => {
        }
        xml.push('<Upload>',
            `<Key>${escapeForXml(key)}</Key>`,
            `<UploadId>${escapeForXml(val.UploadId)}</UploadId>`,
            '<Initiator>',
            `<ID>${escapeForXml(val.Initiator.ID)}</ID>`,
            `<DisplayName>${escapeForXml(val.Initiator.DisplayName)}` +
                '</DisplayName>',
            '</Initiator>',
            '<Owner>',
            `<ID>${escapeForXml(val.Owner.ID)}</ID>`,
            `<DisplayName>${escapeForXml(val.Owner.DisplayName)}` +
                '</DisplayName>',
            '</Owner>',
            `<StorageClass>${escapeForXml(val.StorageClass)}` +
                '</StorageClass>',
            `<Initiated>${escapeForXml(val.Initiated)}</Initiated>`,
-            '</Upload>',
+            '</Upload>'
        );
    });
    l.CommonPrefixes.forEach(prefix => {
        xml.push('<CommonPrefixes>',
            `<Prefix>${escapeForXml(prefix)}</Prefix>`,
-            '</CommonPrefixes>',
+            '</CommonPrefixes>'
        );
    });

View File

@ -5,6 +5,7 @@ const Readable = require('stream').Readable;
 * This class is used to produce zeros filled buffers for a reader consumption
 */
class NullStream extends Readable {
    /**
     * Construct a new zeros filled buffers producer that will
     * produce as much bytes as specified by the range parameter, or the size
@ -31,8 +32,8 @@ class NullStream extends Readable {
    _read(size) {
        const toRead = Math.min(size, this.bytesToRead);
        const buffer = toRead > 0
            ? Buffer.alloc(toRead, 0)
            : null;
        this.bytesToRead -= toRead;
        this.push(buffer);
    }
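A hedged usage sketch. The constructor's JSDoc is truncated above, so the `size`-only form is assumed valid here:

```javascript
// Sketch: stream 16 bytes of zeros; the require path is illustrative.
const NullStream = require('./nullStream');

const zeros = new NullStream(16);
zeros.on('data', buf => {
    // one 16-byte zero-filled buffer, then the stream ends (push(null))
    console.log(buf.length, buf.every(b => b === 0)); // 16 true
});
```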

View File

@ -4,11 +4,11 @@ const errors = require('../errors');
const escapeForXml = require('./escapeForXml');
const errorInvalidArgument = errors.InvalidArgument
    .customizeDescription('The header \'x-amz-tagging\' shall be ' +
        'encoded as UTF-8 then URLEncoded URL query parameters without ' +
        'tag name duplicates.');
const errorBadRequestLimit50 = errors.BadRequest
    .customizeDescription('Object tags cannot be greater than 50');
/*
    Format of xml request:
@ -38,7 +38,7 @@ const _validator = {
        result.Tagging.TagSet &&
        result.Tagging.TagSet.length === 1 &&
        (
            result.Tagging.TagSet[0] === '' ||
            result.Tagging.TagSet[0] &&
            Object.keys(result.Tagging.TagSet[0]).length === 1 &&
            result.Tagging.TagSet[0].Tag &&
@ -155,7 +155,7 @@ function parseTagXml(xml, log, cb) {
function convertToXml(objectTags) {
    const xml = [];
    xml.push('<?xml version="1.0" encoding="UTF-8" standalone="yes"?>',
        '<Tagging> <TagSet>');
    if (objectTags && Object.keys(objectTags).length > 0) {
        Object.keys(objectTags).forEach(key => {
            xml.push(`<Tag><Key>${escapeForXml(key)}</Key>` +
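The final push above is truncated in this view. Assuming it completes each tag with the matching `<Value>` element and that closing tags are appended after the loop, a one-entry tag set would serialize roughly as follows:

```javascript
// Hypothetical result of convertToXml({ project: 'arsenal' }), assuming the
// truncated push appends `<Value>...</Value></Tag>` per key and that
// '</TagSet></Tagging>' is appended after the loop:
// '<?xml version="1.0" encoding="UTF-8" standalone="yes"?>' +
// '<Tagging> <TagSet>' +
// '<Tag><Key>project</Key><Value>arsenal</Value></Tag>' +
// '</TagSet></Tagging>'
```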

View File

@ -68,31 +68,6 @@ function _checkUnmodifiedSince(ifUnmodifiedSinceTime, lastModified) {
    return res;
}

-/**
- * checks 'if-modified-since' and 'if-unmodified-since' headers if included in
- * request against last-modified date of object
- * @param {object} headers - headers from request object
- * @param {string} lastModified - last modified date of object
- * @return {object} contains modifiedSince and unmodifiedSince res objects
- */
-function checkDateModifiedHeaders(headers, lastModified) {
-    let lastModifiedDate = new Date(lastModified);
-    lastModifiedDate.setMilliseconds(0);
-    lastModifiedDate = lastModifiedDate.getTime();
-    const ifModifiedSinceHeader = headers['if-modified-since'] ||
-        headers['x-amz-copy-source-if-modified-since'];
-    const ifUnmodifiedSinceHeader = headers['if-unmodified-since'] ||
-        headers['x-amz-copy-source-if-unmodified-since'];
-    const modifiedSinceRes = _checkModifiedSince(ifModifiedSinceHeader,
-        lastModifiedDate);
-    const unmodifiedSinceRes = _checkUnmodifiedSince(ifUnmodifiedSinceHeader,
-        lastModifiedDate);
-    return { modifiedSinceRes, unmodifiedSinceRes };
-}
/**
 * validateConditionalHeaders - validates 'if-modified-since',
 * 'if-unmodified-since', 'if-match' or 'if-none-match' headers if included in
@ -104,14 +79,23 @@ function checkDateModifiedHeaders(headers, lastModified) {
 * empty object if no error
 */
function validateConditionalHeaders(headers, lastModified, contentMD5) {
+    let lastModifiedDate = new Date(lastModified);
+    lastModifiedDate.setMilliseconds(0);
+    lastModifiedDate = lastModifiedDate.getTime();
    const ifMatchHeader = headers['if-match'] ||
        headers['x-amz-copy-source-if-match'];
    const ifNoneMatchHeader = headers['if-none-match'] ||
        headers['x-amz-copy-source-if-none-match'];
+    const ifModifiedSinceHeader = headers['if-modified-since'] ||
+        headers['x-amz-copy-source-if-modified-since'];
+    const ifUnmodifiedSinceHeader = headers['if-unmodified-since'] ||
+        headers['x-amz-copy-source-if-unmodified-since'];
    const etagMatchRes = _checkEtagMatch(ifMatchHeader, contentMD5);
    const etagNoneMatchRes = _checkEtagNoneMatch(ifNoneMatchHeader, contentMD5);
-    const { modifiedSinceRes, unmodifiedSinceRes } =
-        checkDateModifiedHeaders(headers, lastModified);
+    const modifiedSinceRes = _checkModifiedSince(ifModifiedSinceHeader,
+        lastModifiedDate);
+    const unmodifiedSinceRes = _checkUnmodifiedSince(ifUnmodifiedSinceHeader,
+        lastModifiedDate);
    // If-Unmodified-Since condition evaluates to false and If-Match
    // is not present, then return the error. Otherwise, If-Unmodified-Since is
    // silent when If-Match match, and when If-Match does not match, it's the
@ -136,6 +120,5 @@ module.exports = {
    _checkEtagNoneMatch,
    _checkModifiedSince,
    _checkUnmodifiedSince,
-    checkDateModifiedHeaders,
    validateConditionalHeaders,
};
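Worth noting why milliseconds are zeroed before comparing: HTTP date headers only carry whole seconds, so the object's last-modified is truncated first, otherwise a client date equal to it (ignoring milliseconds) would never match. For instance:

```javascript
// Same normalization as in validateConditionalHeaders above.
let lastModifiedDate = new Date('2024-01-01T00:00:00.750Z');
lastModifiedDate.setMilliseconds(0);
lastModifiedDate = lastModifiedDate.getTime();
console.log(lastModifiedDate === Date.parse('2024-01-01T00:00:00Z')); // true
```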

View File

@ -10,7 +10,6 @@ const routeOPTIONS = require('./routes/routeOPTIONS');
const routesUtils = require('./routesUtils');
const routeWebsite = require('./routes/routeWebsite');
-const { objectKeyByteLimit } = require('../constants');
const requestUtils = require('../../lib/policyEvaluator/requestUtils');

const routeMap = {
@ -43,7 +42,7 @@ function checkBucketAndKey(bucketName, objectKey, method, reqQuery,
        log.debug('empty bucket name', { method: 'routes' });
        return (method !== 'OPTIONS') ?
            errors.MethodNotAllowed : errors.AccessForbidden
                .customizeDescription('CORSResponse: Bucket not found');
    }
    if (bucketName !== undefined && routesUtils.isValidBucketName(bucketName,
        blacklistedPrefixes.bucket) === false) {
@ -58,14 +57,8 @@ function checkBucketAndKey(bucketName, objectKey, method, reqQuery,
            blacklistedPrefixes.object);
        if (!result.isValid) {
            log.debug('invalid object key', { objectKey });
-            if (result.invalidPrefix) {
-                return errors.InvalidArgument.customizeDescription('Invalid ' +
-                    'prefix - object key cannot start with ' +
-                    `"${result.invalidPrefix}".`);
-            }
-            return errors.KeyTooLong.customizeDescription('Object key is too ' +
-                'long. Maximum number of bytes allowed in keys is ' +
-                `${objectKeyByteLimit}.`);
+            return errors.InvalidArgument.customizeDescription('Object key ' +
+                `must not start with "${result.invalidPrefix}".`);
        }
    }
    if ((reqQuery.partNumber || reqQuery.uploadId)
@ -92,7 +85,7 @@ function checkTypes(req, res, params, logger, s3config) {
        'bad routes param: internalHandlers must be an object');
    if (params.statsClient) {
        assert.strictEqual(typeof params.statsClient, 'object',
            'bad routes param: statsClient must be an object');
    }
    assert(Array.isArray(params.allEndpoints),
        'bad routes param: allEndpoints must be an array');
@ -100,13 +93,13 @@ function checkTypes(req, res, params, logger, s3config) {
        'bad routes param: allEndpoints must have at least one endpoint');
    params.allEndpoints.forEach(endpoint => {
        assert.strictEqual(typeof endpoint, 'string',
            'bad routes param: each item in allEndpoints must be a string');
    });
    assert(Array.isArray(params.websiteEndpoints),
        'bad routes param: allEndpoints must be an array');
    params.websiteEndpoints.forEach(endpoint => {
        assert.strictEqual(typeof endpoint, 'string',
            'bad routes param: each item in websiteEndpoints must be a string');
    });
    assert.strictEqual(typeof params.blacklistedPrefixes, 'object',
        'bad routes param: blacklistedPrefixes must be an object');
@ -114,16 +107,16 @@ function checkTypes(req, res, params, logger, s3config) {
        'bad routes param: blacklistedPrefixes.bucket must be an array');
    params.blacklistedPrefixes.bucket.forEach(pre => {
        assert.strictEqual(typeof pre, 'string',
            'bad routes param: each blacklisted bucket prefix must be a string');
    });
    assert(Array.isArray(params.blacklistedPrefixes.object),
        'bad routes param: blacklistedPrefixes.object must be an array');
    params.blacklistedPrefixes.object.forEach(pre => {
        assert.strictEqual(typeof pre, 'string',
            'bad routes param: each blacklisted object prefix must be a string');
    });
-    assert.strictEqual(typeof params.dataRetrievalParams, 'object',
-        'bad routes param: dataRetrievalParams must be a defined object');
+    assert.strictEqual(typeof params.dataRetrievalFn, 'function',
+        'bad routes param: dataRetrievalFn must be a defined function');
    if (s3config) {
        assert.strictEqual(typeof s3config, 'object', 'bad routes param: s3config must be an object');
    }
@ -145,8 +138,7 @@ function checkTypes(req, res, params, logger, s3config) {
 * @param {string[]} params.blacklistedPrefixes.object - object prefixes
 * @param {object} params.unsupportedQueries - object containing true/false
 * values for whether queries are supported
- * @param {function} params.dataRetrievalParams - params to create instance of
- * data retrieval function
+ * @param {function} params.dataRetrievalFn - function to retrieve data
 * @param {RequestLogger} logger - werelogs logger instance
 * @param {String} [s3config] - s3 configuration
 * @returns {undefined}
@ -161,7 +153,7 @@ function routes(req, res, params, logger, s3config) {
        allEndpoints,
        websiteEndpoints,
        blacklistedPrefixes,
-        dataRetrievalParams,
+        dataRetrievalFn,
    } = params;

    const clientInfo = {
@ -179,11 +171,10 @@ function routes(req, res, params, logger, s3config) {
        reqUids = undefined;
    }
    const log = (reqUids !== undefined ?
        logger.newRequestLoggerFromSerializedUids(reqUids) :
        logger.newRequestLogger());
-    if (!req.url.startsWith('/_/healthcheck') &&
-        !req.url.startsWith('/_/report')) {
+    if (!req.url.startsWith('/_/healthcheck')) {
        log.info('received request', clientInfo);
    }
@ -216,7 +207,7 @@ function routes(req, res, params, logger, s3config) {
        return routesUtils.responseXMLBody(
            errors.InvalidURI.customizeDescription('Could not parse the ' +
                'specified URI. Check your restEndpoints configuration.'),
            undefined, res, log);
    }

    log.addDefaultFields({
@ -238,17 +229,16 @@ function routes(req, res, params, logger, s3config) {
    if (bucketOrKeyError) {
        log.trace('error with bucket or key value',
            { error: bucketOrKeyError });
        return routesUtils.responseXMLBody(bucketOrKeyError, null, res, log);
    }

    // bucket website request
    if (websiteEndpoints && websiteEndpoints.indexOf(req.parsedHost) > -1) {
-        return routeWebsite(req, res, api, log, statsClient,
-            dataRetrievalParams);
+        return routeWebsite(req, res, api, log, statsClient, dataRetrievalFn);
    }

-    return method(req, res, api, log, statsClient, dataRetrievalParams);
+    return method(req, res, api, log, statsClient, dataRetrievalFn);
}

module.exports = routes;
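For orientation, a hedged sketch of a `params` object that satisfies the assertions in `checkTypes` on the new side of this diff. The `api` interface and the `dataRetrievalFn` signature are assumptions, as neither is shown here:

```javascript
const params = {
    api: { callApiMethod: () => {} }, // assumed interface
    internalHandlers: {},
    // statsClient is optional; when present it must be an object
    allEndpoints: ['s3.example.com'],
    websiteEndpoints: ['s3-website.example.com'],
    blacklistedPrefixes: { bucket: ['mpu'], object: ['mpu'] },
    unsupportedQueries: {},
    dataRetrievalFn: (objGetInfo, response, log, cb) => cb(null), // assumed signature
};
// routes(req, res, params, logger) then dispatches by HTTP method.
```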

View File

@ -7,7 +7,7 @@ function routeDELETE(request, response, api, log, statsClient) {
    if (request.query.uploadId) {
        if (request.objectKey === undefined) {
            return routesUtils.responseNoBody(
                errors.InvalidRequest.customizeDescription('A key must be ' +
                    'specified'), null, response, 200, log);
        }
        api.callApiMethod('multipartDelete', request, response, log,
@ -19,77 +19,77 @@ function routeDELETE(request, response, api, log, statsClient) {
    } else if (request.objectKey === undefined) {
        if (request.query.website !== undefined) {
            return api.callApiMethod('bucketDeleteWebsite', request,
                response, log, (err, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseNoBody(err, corsHeaders,
                        response, 204, log);
                });
        } else if (request.query.cors !== undefined) {
            return api.callApiMethod('bucketDeleteCors', request, response,
                log, (err, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseNoBody(err, corsHeaders,
                        response, 204, log);
                });
        } else if (request.query.replication !== undefined) {
            return api.callApiMethod('bucketDeleteReplication', request,
                response, log, (err, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseNoBody(err, corsHeaders,
                        response, 204, log);
                });
        } else if (request.query.lifecycle !== undefined) {
            return api.callApiMethod('bucketDeleteLifecycle', request,
                response, log, (err, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseNoBody(err, corsHeaders,
                        response, 204, log);
                });
        } else if (request.query.policy !== undefined) {
            return api.callApiMethod('bucketDeletePolicy', request,
                response, log, (err, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseNoBody(err, corsHeaders,
                        response, 204, log);
                });
        } else if (request.query.encryption !== undefined) {
            return api.callApiMethod('bucketDeleteEncryption', request,
                response, log, (err, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseNoBody(err, corsHeaders,
                        response, 204, log);
                });
        }
        api.callApiMethod('bucketDelete', request, response, log,
            (err, corsHeaders) => {
                routesUtils.statsReport500(err, statsClient);
                return routesUtils.responseNoBody(err, corsHeaders, response,
                    204, log);
            });
    } else {
        if (request.query.tagging !== undefined) {
            return api.callApiMethod('objectDeleteTagging', request,
                response, log, (err, resHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseNoBody(err, resHeaders,
                        response, 204, log);
                });
        }
        api.callApiMethod('objectDelete', request, response, log,
            (err, corsHeaders) => {
                /*
                 * Since AWS expects a 204 regardless of the existence of
                 * the object, the errors NoSuchKey and NoSuchVersion should not
                 * be sent back as a response.
                 */
                if (err && !err.NoSuchKey && !err.NoSuchVersion) {
                    return routesUtils.responseNoBody(err, corsHeaders,
                        response, null, log);
                }
                routesUtils.statsReport500(err, statsClient);
                return routesUtils.responseNoBody(null, corsHeaders, response,
                    204, log);
            });
    }
    return undefined;
}

View File

@ -1,8 +1,7 @@
const errors = require('../../errors');
const routesUtils = require('../routesUtils');

-function routerGET(request, response, api, log, statsClient,
-    dataRetrievalParams) {
+function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
    log.debug('routing request', { method: 'routerGET' });
    if (request.bucketName === undefined && request.objectKey !== undefined) {
        routesUtils.responseXMLBody(errors.NoSuchBucket, null, response, log);
@ -17,18 +16,18 @@ function routerGET(request, response, api, log, statsClient,
        // GET bucket ACL
        if (request.query.acl !== undefined) {
            api.callApiMethod('bucketGetACL', request, response, log,
                (err, xml, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseXMLBody(err, xml, response, log,
                        corsHeaders);
                });
        } else if (request.query.replication !== undefined) {
            api.callApiMethod('bucketGetReplication', request, response, log,
                (err, xml, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseXMLBody(err, xml, response, log,
                        corsHeaders);
                });
        } else if (request.query.cors !== undefined) {
            api.callApiMethod('bucketGetCors', request, response, log,
                (err, xml, corsHeaders) => {
@ -70,7 +69,7 @@ function routerGET(request, response, api, log, statsClient,
                (err, xml, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseXMLBody(err, xml, response, log,
                        corsHeaders);
                });
        } else if (request.query.policy !== undefined) {
            api.callApiMethod('bucketGetPolicy', request, response, log,
@ -95,18 +94,11 @@ function routerGET(request, response, api, log, statsClient,
                });
        } else if (request.query.encryption !== undefined) {
            api.callApiMethod('bucketGetEncryption', request, response, log,
                (err, xml, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseXMLBody(err, xml, response,
                        log, corsHeaders);
                });
-        } else if (request.query.search !== undefined) {
-            api.callApiMethod('metadataSearch', request, response, log,
-                (err, xml, corsHeaders) => {
-                    routesUtils.statsReport500(err, statsClient);
-                    return routesUtils.responseXMLBody(err, xml, response,
-                        log, corsHeaders);
-                });
        } else {
            // GET bucket
            api.callApiMethod('bucketGet', request, response, log,
@ -165,8 +157,8 @@ function routerGET(request, response, api, log, statsClient,
                log.end().addDefaultFields({ contentLength });
                routesUtils.statsReport500(err, statsClient);
                return routesUtils.responseStreamData(err, request.query,
-                    resMetaHeaders, dataGetInfo, dataRetrievalParams,
-                    response, range, log);
+                    resMetaHeaders, dataGetInfo, dataRetrievalFn, response,
+                    range, log);
            });
    }
}

View File

@ -21,11 +21,11 @@ function routeOPTIONS(request, response, api, log, statsClient) {
    }
    return api.callApiMethod('corsPreflight', request, response, log,
        (err, resHeaders) => {
            routesUtils.statsReport500(err, statsClient);
            return routesUtils.responseNoBody(err, resHeaders, response, 200,
                log);
        });
}
module.exports = routeOPTIONS;

View File

@ -27,28 +27,28 @@ function routePOST(request, response, api, log) {
    if (request.query.uploads !== undefined) {
        return api.callApiMethod('initiateMultipartUpload', request,
            response, log, (err, result, corsHeaders) =>
                routesUtils.responseXMLBody(err, result, response, log,
                    corsHeaders));
    }
    // POST complete multipart upload
    if (request.query.uploadId !== undefined) {
        return api.callApiMethod('completeMultipartUpload', request,
            response, log, (err, result, resHeaders) =>
                routesUtils.responseXMLBody(err, result, response, log,
                    resHeaders));
    }
    // POST multiObjectDelete
    if (request.query.delete !== undefined) {
        return api.callApiMethod('multiObjectDelete', request, response,
            log, (err, xml, corsHeaders) =>
                routesUtils.responseXMLBody(err, xml, response, log,
                    corsHeaders));
    }
    return routesUtils.responseNoBody(errors.NotImplemented, null, response,
        200, log);
}
/* eslint-enable no-param-reassign */
module.exports = routePOST;

View File

@ -14,16 +14,16 @@ function routePUT(request, response, api, log, statsClient) {
        || contentLength < 0)) || contentLength === '') {
        log.debug('invalid content-length header');
        return routesUtils.responseNoBody(
            errors.BadRequest, null, response, null, log);
    }
    // PUT bucket ACL
    if (request.query.acl !== undefined) {
        api.callApiMethod('bucketPutACL', request, response, log,
            (err, corsHeaders) => {
                routesUtils.statsReport500(err, statsClient);
                return routesUtils.responseNoBody(err, corsHeaders,
                    response, 200, log);
            });
    } else if (request.query.versioning !== undefined) {
        api.callApiMethod('bucketPutVersioning', request, response, log,
            (err, corsHeaders) => {
@ -82,11 +82,11 @@ function routePUT(request, response, api, log, statsClient) {
            });
    } else if (request.query.encryption !== undefined) {
        api.callApiMethod('bucketPutEncryption', request, response, log,
            (err, corsHeaders) => {
                routesUtils.statsReport500(err, statsClient);
                return routesUtils.responseNoBody(err, corsHeaders,
                    response, 200, log);
            });
    } else {
        // PUT bucket
        return api.callApiMethod('bucketPut', request, response, log,
@ -110,7 +110,7 @@ function routePUT(request, response, api, log, statsClient) {
                method: 'routePUT',
            });
            return routesUtils
                .responseNoBody(errors.InvalidDigest, null, response, 200, log);
        }
        if (request.headers['content-md5']) {
            request.contentMD5 = request.headers['content-md5'];
@ -126,17 +126,17 @@ function routePUT(request, response, api, log, statsClient) {
            });
            return routesUtils
                .responseNoBody(errors.InvalidDigest, null, response, 200,
                    log);
        }
    }
    if (request.query.partNumber) {
        if (request.headers['x-amz-copy-source']) {
            api.callApiMethod('objectPutCopyPart', request, response, log,
                (err, xml, additionalHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseXMLBody(err, xml, response, log,
                        additionalHeaders);
                });
        } else {
            api.callApiMethod('objectPutPart', request, response, log,
                (err, calculatedHash, corsHeaders) => {
@ -202,11 +202,11 @@ function routePUT(request, response, api, log, statsClient) {
            contentLength: request.parsedContentLength,
        });
        api.callApiMethod('objectPut', request, response, log,
            (err, resHeaders) => {
                routesUtils.statsReport500(err, statsClient);
                return routesUtils.responseNoBody(err, resHeaders,
                    response, 200, log);
            });
        }
    }
    return undefined;

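Both InvalidDigest branches in these hunks guard the Content-MD5 header, which S3 defines as the base64 encoding of the body's 16-byte MD5 digest. A sketch of that check, as an illustration rather than Arsenal's actual helper:

```javascript
const crypto = require('crypto');

// Illustrative Content-MD5 validation; not the code behind the hunks above.
function contentMD5Matches(headerValue, body) {
    const expected = Buffer.from(headerValue, 'base64');
    if (expected.length !== 16) {
        return false; // malformed header, maps to InvalidDigest
    }
    return crypto.createHash('md5').update(body).digest().equals(expected);
}
```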

@@ -2,7 +2,7 @@ const errors = require('../../errors');
const routesUtils = require('../routesUtils');

function routerWebsite(request, response, api, log, statsClient,
-    dataRetrievalParams) {
+    dataRetrievalFn) {
    log.debug('routing request', { method: 'routerWebsite' });
    // website endpoint only supports GET and HEAD and must have a bucket
    // http://docs.aws.amazon.com/AmazonS3/latest/dev/WebsiteEndpoints.html
@@ -14,7 +14,7 @@ function routerWebsite(request, response, api, log, statsClient,
    if (request.method === 'GET') {
        return api.callApiMethod('websiteGet', request, response, log,
            (err, userErrorPageFailure, dataGetInfo, resMetaHeaders,
                redirectInfo, key) => {
                routesUtils.statsReport500(err, statsClient);
                // request being redirected
                if (redirectInfo) {
@@ -27,7 +27,7 @@ function routerWebsite(request, response, api, log, statsClient,
                // user has their own error page
                if (err && dataGetInfo) {
                    return routesUtils.streamUserErrorPage(err, dataGetInfo,
-                        dataRetrievalParams, response, resMetaHeaders, log);
+                        dataRetrievalFn, response, resMetaHeaders, log);
                }
                // send default error html response
                if (err) {
@@ -37,27 +37,27 @@ function routerWebsite(request, response, api, log, statsClient,
                }
                // no error, stream data
                return routesUtils.responseStreamData(null, request.query,
-                    resMetaHeaders, dataGetInfo, dataRetrievalParams, response,
+                    resMetaHeaders, dataGetInfo, dataRetrievalFn, response,
                    null, log);
            });
    }
    if (request.method === 'HEAD') {
        return api.callApiMethod('websiteHead', request, response, log,
            (err, resMetaHeaders, redirectInfo, key) => {
                routesUtils.statsReport500(err, statsClient);
                if (redirectInfo) {
                    return routesUtils.redirectRequest(redirectInfo,
                        key, request.connection.encrypted,
                        response, request.headers.host, resMetaHeaders, log);
                }
                // could redirect on err so check for redirectInfo first
                if (err) {
                    return routesUtils.errorHeaderResponse(err, response,
                        resMetaHeaders, log);
                }
                return routesUtils.responseContentHeaders(err, {}, resMetaHeaders,
                    response, log);
            });
    }
    return undefined;
}

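The rename from `dataRetrievalParams` to `dataRetrievalFn` is the visible half of a larger change: the website router now receives a ready-made retrieval function instead of the raw pieces needed to build one. Judging by how `retrieveData` invokes it in the routesUtils hunks below, the expected shape is roughly:

```javascript
// Signature inferred from the retrieveData() call sites below; the body is
// a placeholder, not code from the diff.
const dataRetrievalFn = (location, response, log, callback) => {
    // fetch `location` from the data backend, then either
    //   callback(err)             on failure, or
    //   callback(null, readable)  with a stream to pipe into `response`
};
```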

@@ -4,9 +4,6 @@ const errors = require('../errors');
const constants = require('../constants');
const { eachSeries } = require('async');
-const DataWrapper = require('../storage/data/DataWrapper');
-const { objectKeyByteLimit } = require('../constants');

const responseErr = new Error();
responseErr.code = 'ResponseError';
responseErr.message = 'response closed by client request before all data sent';
@@ -28,7 +25,7 @@ function setCommonResponseHeaders(headers, response, log) {
            } catch (e) {
                log.debug('header can not be added ' +
                    'to the response', { header: headers[key],
                    error: e.stack, method: 'setCommonResponseHeaders' });
            }
        }
    });
@@ -71,7 +68,7 @@ const XMLResponseBackend = {
     * @return {object} response - response object with additional headers
     */
    okResponse: function okXMLResponse(xml, response, log,
        additionalHeaders) {
        const bytesSent = Buffer.byteLength(xml);
        log.trace('sending success xml response');
        log.addDefaultFields({
@@ -118,7 +115,7 @@ const XMLResponseBackend = {
            `<Message>${errCode.description}</Message>`,
            '<Resource></Resource>',
            `<RequestId>${log.getSerializedUids()}</RequestId>`,
-            '</Error>',
+            '</Error>'
        );
        const xmlStr = xml.join('');
        const bytesSent = Buffer.byteLength(xmlStr);
@@ -148,7 +145,7 @@ const JSONResponseBackend = {
     * @return {object} response - response object with additional headers
     */
    okResponse: function okJSONResponse(json, response, log,
        additionalHeaders) {
        const bytesSent = Buffer.byteLength(json);
        log.trace('sending success json response');
        log.addDefaultFields({
@@ -166,7 +163,7 @@ const JSONResponseBackend = {
    },
    errorResponse: function errorJSONResponse(errCode, response, log,
        corsHeaders) {
        log.trace('sending error json response', { errCode });
        /*
        {
@@ -260,7 +257,7 @@ function okContentHeadersResponse(overrideParams, resHeaders,
    return response;
}

-function retrieveDataAzure(locations, retrieveDataParams, response, logger) {
+function retrieveDataAzure(locations, retrieveDataFn, response, logger) {
    const errorHandlerFn = () => { response.connection.destroy(); };
    const current = locations.shift();
@@ -268,18 +265,7 @@ function retrieveDataAzure(locations, retrieveDataParams, response, logger) {
        logger.error('error piping data from source');
        errorHandlerFn(err);
    });
-    const {
-        client,
-        implName,
-        config,
-        kms,
-        metadata,
-        locStorageCheckFn,
-        vault,
-    } = retrieveDataParams;
-    const data = new DataWrapper(
-        client, implName, config, kms, metadata, locStorageCheckFn, vault);
-    return data.get(current, response, logger, err => {
+    return retrieveDataFn(current, response, logger, err => {
        if (err) {
            logger.error('failed to get object from source', {
                error: err,
@@ -292,12 +278,12 @@ function retrieveDataAzure(locations, retrieveDataParams, response, logger) {
    });
}

-function retrieveData(locations, retrieveDataParams, response, log) {
+function retrieveData(locations, retrieveDataFn, response, log) {
    if (locations.length === 0) {
        return response.end();
    }
    if (locations[0].azureStreamingOptions) {
-        return retrieveDataAzure(locations, retrieveDataParams, response, log);
+        return retrieveDataAzure(locations, retrieveDataFn, response, log);
    }
    // response is of type http.ServerResponse
    let responseDestroyed = false;
@@ -307,33 +293,16 @@ function retrieveData(locations, retrieveDataParams, response, log) {
        response.destroy();
        responseDestroyed = true;
    };
-    const _destroyReadable = readable => {
-        // s3-data sends Readable stream only which does not implement destroy
-        if (readable && readable.destroy) {
-            readable.destroy();
-        }
-    };
    // the S3-client might close the connection while we are processing it
    response.once('close', () => {
        responseDestroyed = true;
-        _destroyReadable(currentStream);
+        if (currentStream) {
+            currentStream.destroy();
+        }
    });

-    const {
-        client,
-        implName,
-        config,
-        kms,
-        metadata,
-        locStorageCheckFn,
-        vault,
-    } = retrieveDataParams;
-    const data = new DataWrapper(
-        client, implName, config, kms, metadata, locStorageCheckFn, vault);
    return eachSeries(locations,
-        (current, next) => data.get(current, response, log,
+        (current, next) => retrieveDataFn(current, response, log,
            (err, readable) => {
                // NB: readable is of IncomingMessage type
                if (err) {
@@ -350,7 +319,7 @@ function retrieveData(locations, retrieveDataParams, response, log) {
                if (responseDestroyed || response.isclosed) {
                    log.debug(
                        'response destroyed before readable could stream');
-                    _destroyReadable(readable);
+                    readable.destroy();
                    return next(responseErr);
                }
                // readable stream successfully consumed
@@ -368,27 +337,27 @@ function retrieveData(locations, retrieveDataParams, response, log) {
                currentStream = readable;
                return readable.pipe(response, { end: false });
            }), err => {
                currentStream = null;
                if (err) {
                    log.debug('abort response due to error', {
                        error: err.code, errMsg: err.message });
                }
                // call end for all cases (error/success) per node.js docs
                // recommendation
                response.end();
        },
    );
}

function _responseBody(responseBackend, errCode, payload, response, log,
    additionalHeaders) {
    if (errCode && !response.headersSent) {
        return responseBackend.errorResponse(errCode, response, log,
            additionalHeaders);
    }
    if (!response.headersSent) {
        return responseBackend.okResponse(payload, response, log,
            additionalHeaders);
    }
    return undefined;
}
@@ -397,8 +366,8 @@ function _computeContentLengthFromLocation(dataLocations) {
    return dataLocations.reduce(
        (sum, location) => (sum !== undefined &&
            (typeof location.size === 'number' || typeof location.size === 'string') ?
            sum + Number.parseInt(location.size, 10) :
            undefined), 0);
}

function _contentLengthMatchesLocations(contentLength, dataLocations) {
@@ -419,7 +388,7 @@ const routesUtils = {
     */
    responseXMLBody(errCode, xml, response, log, additionalHeaders) {
        return _responseBody(XMLResponseBackend, errCode, xml, response,
            log, additionalHeaders);
    },

    /**
@@ -433,7 +402,7 @@ const routesUtils = {
     */
    responseJSONBody(errCode, json, response, log, additionalHeaders) {
        return _responseBody(JSONResponseBackend, errCode, json, response,
            log, additionalHeaders);
    },

    /**
@@ -448,7 +417,7 @@ const routesUtils = {
    responseNoBody(errCode, resHeaders, response, httpCode = 200, log) {
        if (errCode && !response.headersSent) {
            return XMLResponseBackend.errorResponse(errCode, response, log,
                resHeaders);
        }
        if (!response.headersSent) {
            return okHeaderResponse(resHeaders, response, httpCode, log);
@@ -466,10 +435,10 @@ const routesUtils = {
     * @return {object} - router's response object
     */
    responseContentHeaders(errCode, overrideParams, resHeaders, response,
        log) {
        if (errCode && !response.headersSent) {
            return XMLResponseBackend.errorResponse(errCode, response, log,
                resHeaders);
        }
        if (!response.headersSent) {
            // Undefined added as an argument since need to send range to
@@ -492,8 +461,7 @@ const routesUtils = {
     * @param {array | null} dataLocations --
     *        - array of locations to get streams from sproxyd
     *        - null if no data for object and only metadata
-     * @param {object} retrieveDataParams - params to create instance of data
-     * retrieval function
+     * @param {function} retrieveDataFn - function to handle streaming data
     * @param {http.ServerResponse} response - response sent to the client
     * @param {array | undefined} range - range in format of [start, end]
     *        if range header contained in request or undefined if not
@@ -501,10 +469,10 @@ const routesUtils = {
     * @return {undefined}
     */
    responseStreamData(errCode, overrideParams, resHeaders, dataLocations,
-        retrieveDataParams, response, range, log) {
+        retrieveDataFn, response, range, log) {
        if (errCode && !response.headersSent) {
            return XMLResponseBackend.errorResponse(errCode, response, log,
                resHeaders);
        }
        if (dataLocations !== null && !response.headersSent) {
            // sanity check of content length against individual data
@@ -512,13 +480,13 @@ const routesUtils = {
            const contentLength = resHeaders && resHeaders['Content-Length'];
            if (contentLength !== undefined &&
                !_contentLengthMatchesLocations(contentLength,
                    dataLocations)) {
                log.error('logic error: total length of fetched data ' +
                    'locations does not match returned content-length',
                    { contentLength, dataLocations });
                return XMLResponseBackend.errorResponse(errors.InternalError,
                    response, log,
                    resHeaders);
            }
        }
        if (!response.headersSent) {
@@ -537,21 +505,20 @@ const routesUtils = {
                httpCode: response.statusCode,
            });
        });
-        return retrieveData(dataLocations, retrieveDataParams, response, log);
+        return retrieveData(dataLocations, retrieveDataFn, response, log);
    },

    /**
     * @param {object} err -- arsenal error object
     * @param {array} dataLocations --
     *        - array of locations to get streams from backend
-     * @param {object} retrieveDataParams - params to create instance of
-     * data retrieval function
+     * @param {function} retrieveDataFn - function to handle streaming data
     * @param {http.ServerResponse} response - response sent to the client
     * @param {object} corsHeaders - CORS-related response headers
     * @param {object} log - Werelogs logger
     * @return {undefined}
     */
-    streamUserErrorPage(err, dataLocations, retrieveDataParams, response,
+    streamUserErrorPage(err, dataLocations, retrieveDataFn, response,
        corsHeaders, log) {
        setCommonResponseHeaders(corsHeaders, response, log);
        response.writeHead(err.code, { 'Content-type': 'text/html' });
@@ -560,7 +527,7 @@ const routesUtils = {
                httpCode: response.statusCode,
            });
        });
-        return retrieveData(dataLocations, retrieveDataParams, response, log);
+        return retrieveData(dataLocations, retrieveDataFn, response, log);
    },

    /**
@@ -591,7 +558,7 @@ const routesUtils = {
            `<h1>${err.code} ${response.statusMessage}</h1>`,
            '<ul>',
            `<li>Code: ${err.message}</li>`,
-            `<li>Message: ${err.description}</li>`,
+            `<li>Message: ${err.description}</li>`
        );

        if (!userErrorPageFailure && bucketName) {
@@ -601,7 +568,7 @@ const routesUtils = {
            `<li>RequestId: ${log.getSerializedUids()}</li>`,
            // AWS response contains HostId here.
            // TODO: consider adding
-            '</ul>',
+            '</ul>'
        );
        if (userErrorPageFailure) {
            html.push(
@@ -611,13 +578,13 @@ const routesUtils = {
                '<ul>',
                `<li>Code: ${err.message}</li>`,
                `<li>Message: ${err.description}</li>`,
-                '</ul>',
+                '</ul>'
            );
        }
        html.push(
            '<hr/>',
            '</body>',
-            '</html>',
+            '</html>'
        );

        return response.end(html.join(''), 'utf8', () => {
@@ -839,7 +806,7 @@ const routesUtils = {
                    // most specific potential hostname
                    bucketName =
                        potentialBucketName.length < bucketName.length ?
                            potentialBucketName : bucketName;
                }
            }
        }
@@ -847,7 +814,7 @@ const routesUtils = {
            return bucketName;
        }
        throw new Error(
-            `bad request: hostname ${host} is not in valid endpoints`,
+            `bad request: hostname ${host} is not in valid endpoints`
        );
    },
@@ -914,9 +881,6 @@ const routesUtils = {
        if (invalidPrefix) {
            return { isValid: false, invalidPrefix };
        }
-        if (Buffer.byteLength(objectKey, 'utf8') > objectKeyByteLimit) {
-            return { isValid: false };
-        }
        return { isValid: true };
    },

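Taken together, the routesUtils hunks invert a dependency: the left side built a DataWrapper inside this file from a `retrieveDataParams` bag, while the right side expects callers to inject the retrieval function. Under that reading, a caller-side adapter could look like the following sketch; the wrapper function is an assumption, while the constructor arguments are copied from the removed lines:

```javascript
// Hypothetical caller-side adapter reproducing what the removed code did
// internally; the module path is assumed.
const DataWrapper = require('../storage/data/DataWrapper');

function makeRetrieveDataFn(retrieveDataParams) {
    const { client, implName, config, kms, metadata,
        locStorageCheckFn, vault } = retrieveDataParams;
    const data = new DataWrapper(
        client, implName, config, kms, metadata, locStorageCheckFn, vault);
    // matches the (location, response, log, callback) shape used by
    // retrieveData() above
    return (location, response, log, callback) =>
        data.get(location, response, log, callback);
}
```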

@@ -79,9 +79,9 @@ class ListRecordStream extends stream.Readable {
                'did not encounter the last saved offset in oplog, ' +
                'resuming processing right after the latest record ' +
                'to date; some entries may have been skipped', {
                    lastSavedID: this._lastSavedID,
                    latestRecordID: this._latestOplogID,
                });
            this._unpublishedListing = true;
        }
        ++this._skipCount;


@@ -552,11 +552,11 @@ class MongoClientInterface {
                    $exists: false,
                },
            },
            {
                'value.versionId': {
                    $gt: objVal.versionId,
                },
            },
        ],
    };
    // values to update master

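This filter is the guard for updating a master entry: the write goes through only when the stored master has no versionId or a `$gt` (greater) one. Version IDs are generated so that, in Arsenal's encoding, a lexicographically greater ID is an older version, so the condition reads as "only overwrite a master that is missing or older". The deleted getObjectMD test file later in this diff uses the same shape, condensed here:

```javascript
// Condensed from the updateMasterObject helper in the deleted tests below;
// masterKey, incomingVersionId and objVal are the caller's values.
collection.updateOne(
    {
        _id: masterKey,
        $or: [
            { 'value.versionId': { $exists: false } },
            { 'value.versionId': { $gt: incomingVersionId } },
        ],
    },
    { $set: { _id: masterKey, value: objVal } },
    { upsert: true },
    callback);
```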

@@ -29,4 +29,5 @@ server.start(() => {
    logger.info('Metadata Proxy Server successfully started. ' +
        `Using the ${metadataWrapper.implName} backend`);
});
```


@@ -60,8 +60,8 @@ class TestMatrix {
        this.elementsToSpecialize = elementsToSpecialize;
        this.callback = callback;
        this.description = typeof description === 'undefined'
            ? ''
            : description;

        return this;
    }
@@ -158,15 +158,15 @@ class TestMatrix {
        const callFunction = (matrixFather, matrixChild, callback,
            description) => {
            const result = Object.keys(matrixChild.params)
                .every(currentKey =>
                    Object.prototype.toString.call(
-                        matrixChild.params[currentKey],
+                        matrixChild.params[currentKey]
                    ).indexOf('Array') === -1);

            if (result === true) {
                describe(matrixChild.serialize(), () => {
                    it(description,
                        done => callback(matrixChild, done));
                });
            } else {
                describe(matrixChild.serialize(), () => {

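The `every(...)` expression above is how the matrix tells a fully specialized combination from one that still needs expansion: a leaf has no array-valued parameter left. The same predicate in isolation (the helper name is hypothetical):

```javascript
// Hypothetical helper restating the leaf check used by callFunction above.
function isFullySpecialized(params) {
    return Object.keys(params).every(key =>
        Object.prototype.toString.call(params[key]).indexOf('Array') === -1);
}

isFullySpecialized({ auth: 'v4', user: ['alice', 'bob'] }); // false
isFullySpecialized({ auth: 'v4', user: 'alice' });          // true
```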

@@ -247,7 +247,7 @@ function decode(str) {
}

module.exports = { generateVersionId, getInfVid,
    hexEncode, hexDecode,
    base62Encode, base62Decode,
    encode, decode,
    ENC_TYPE_HEX, ENC_TYPE_BASE62 };

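The deleted MongoClientInterface tests further down exercise `generateVersionId`; their call shape is reproduced here, plus an encode/decode round trip that is assumed from the export names rather than shown in this diff:

```javascript
// Call shape copied from the deleted tests below; the require path and the
// round-trip behaviour of encode/decode are assumptions.
const { generateVersionId, encode, decode } =
    require('../../lib/versioning/VersionID');

let uidCounter = 0;
const versionId = generateVersionId(`${process.pid}.${uidCounter++}`, 'RG001');

const shortened = encode(versionId);          // assumed: shorten for storage
console.log(decode(shortened) === versionId); // expected: true
```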

@@ -84,7 +84,7 @@ class VersioningRequestProcessor {
                return callback(null, data);
            }
            logger.debug('master version is a PHD, getting the latest version',
                { db, key });
            // otherwise, need to search for the latest version
            return this.getByListing(request, logger, callback);
        });
@@ -187,7 +187,7 @@ class VersioningRequestProcessor {
                    return entry.callback(err, value);
                }
                return this.wgm.get(entry.request, entry.logger,
                    entry.callback);
            });
            delete this.queue[cacheKey];
        }
@@ -267,19 +267,19 @@ class VersioningRequestProcessor {
                return callback(err);
            }
            return this.writeCache.batch({ db, array, options },
                logger, err => callback(err, `{"versionId":"${vid}"}`));
        };

        if (versionId) {
            return this.processVersionSpecificPut(request, logger,
                versioningCb);
        }
        if (versioning) {
            return this.processNewVersionPut(request, logger, versioningCb);
        }
        // no versioning or versioning configuration off
        return this.writeCache.batch({ db, array: [{ key, value }] },
            logger, callback);
    }

    /**
@@ -353,7 +353,7 @@ class VersioningRequestProcessor {
        if (!(options && options.versionId)) {
            return this.writeCache.batch({ db,
                array: [{ key, type: 'del' }] },
                logger, callback);
        }
        // version specific DELETE
        return this.processVersionSpecificDelete(request, logger,
@@ -399,7 +399,7 @@ class VersioningRequestProcessor {
            const cacheKey = formatCacheKey(db, key);
            clearTimeout(this.repairing[cacheKey]);
            this.repairing[cacheKey] = setTimeout(() =>
                this.getByListing(request, logger, () => {}), 15000);
        }
        return callback(null, ops, versionId);
    });

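The last hunk shows how PHD repairs are debounced per db/key pair: any pending timer for the key is cleared before a new 15-second one is armed, so repeated hits on a placeholder master schedule a single repair. The pattern in isolation (names are stand-ins, not Arsenal's API):

```javascript
// Debounce-per-key sketch of the repair scheduling above; `repairKey` is a
// hypothetical stand-in for getByListing.
const repairing = {};

function scheduleRepair(cacheKey, repairKey, delayMs = 15000) {
    clearTimeout(repairing[cacheKey]); // drop any repair already pending
    repairing[cacheKey] = setTimeout(() => {
        delete repairing[cacheKey];
        repairKey(cacheKey);
    }, delayMs);
}
```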

@@ -3,14 +3,14 @@
  "engines": {
    "node": ">=16"
  },
-  "version": "8.1.39",
+  "version": "7.10.15",
  "description": "Common utilities for the S3 project components",
  "main": "build/index.js",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/scality/Arsenal.git"
  },
-  "author": "Scality Inc.",
+  "author": "Giorgio Regni",
  "license": "Apache-2.0",
  "bugs": {
    "url": "https://github.com/scality/Arsenal/issues"
@@ -21,18 +21,17 @@
    "JSONStream": "^1.0.0",
    "agentkeepalive": "^4.1.3",
    "ajv": "6.12.2",
-    "async": "~2.6.1",
+    "async": "~2.1.5",
    "aws-sdk": "2.80.0",
    "azure-storage": "2.10.3",
    "backo": "^1.1.0",
    "base-x": "3.0.8",
    "base62": "2.0.1",
    "bson": "4.0.0",
-    "debug": "~4.1.0",
+    "debug": "~2.6.9",
    "diskusage": "^1.1.1",
    "fcntl": "github:scality/node-fcntl#0.2.0",
    "hdclient": "scality/hdclient#1.1.0",
-    "https-proxy-agent": "^2.2.0",
    "ioredis": "^4.28.5",
    "ipaddr.js": "1.9.1",
    "level": "~5.0.1",
@@ -40,11 +39,11 @@
    "mongodb": "^3.0.1",
    "node-forge": "^0.7.1",
    "prom-client": "10.2.3",
-    "simple-glob": "^0.2.0",
-    "socket.io": "2.4.1",
-    "socket.io-client": "2.4.0",
+    "simple-glob": "^0.2",
+    "socket.io": "~2.3.0",
+    "socket.io-client": "~2.3.0",
    "sproxydclient": "github:scality/sproxydclient#8.0.3",
-    "utf8": "3.0.0",
+    "utf8": "2.1.2",
    "uuid": "^3.0.1",
    "werelogs": "scality/werelogs#8.1.0",
    "xml2js": "~0.4.23"
@@ -58,13 +57,13 @@
    "@sinonjs/fake-timers": "^6.0.1",
    "@types/jest": "^27.4.1",
    "@types/node": "^17.0.21",
-    "eslint": "^8.9.0",
+    "eslint": "2.13.1",
    "eslint-config-airbnb": "6.2.0",
-    "eslint-config-scality": "scality/Guidelines#ec33dfb",
+    "eslint-config-scality": "scality/Guidelines#7.10.2",
    "eslint-plugin-react": "^4.3.0",
    "jest": "^27.5.1",
+    "mocha": "8.0.1",
    "mongodb-memory-server": "^6.0.2",
-    "nyc": "^15.1.0",
    "sinon": "^9.0.2",
    "temp": "0.9.1",
    "ts-jest": "^27.1.3",
@@ -78,15 +77,11 @@
    "test": "jest tests/unit",
    "build": "tsc",
    "prepare": "yarn build || true",
-    "ft_test": "jest tests/functional --testTimeout=120000 --forceExit",
-    "coverage": "nyc --clean jest tests --coverage --testTimeout=120000 --forceExit"
+    "ft_test": "jest tests/functional --testTimeout=120000 --forceExit"
  },
-  "private": true,
  "jest": {
+    "private": true,
    "maxWorkers": 1,
-    "coverageReporters": [
-      "json"
-    ],
    "collectCoverageFrom": [
      "lib/**/*.{js,ts}",
      "index.js"
@@ -99,12 +94,5 @@
        }
      }
    }
-  },
-  "nyc": {
-    "tempDirectory": "coverage",
-    "reporter": [
-      "lcov",
-      "text"
-    ]
  }
}


@@ -43,28 +43,28 @@ describe('KMIP High Level Driver', () => {
            it('should work with' +
                ` x-name attribute: ${!!bucketNameAttributeName},` +
                ` compound creation: ${compoundCreateActivate}`,
            done => {
                const kmipClient = new KMIPClient(options, TTLVCodec,
                    LoopbackServerTransport);
                const plaintext = Buffer.from(crypto.randomBytes(32));
                async.waterfall([
                    next => kmipClient.createBucketKey('plop', logger, next),
                    (id, next) =>
                        kmipClient.cipherDataKey(1, id, plaintext,
                            logger, (err, ciphered) => {
                                next(err, id, ciphered);
                            }),
                    (id, ciphered, next) =>
                        kmipClient.decipherDataKey(
                            1, id, ciphered, logger, (err, deciphered) => {
                                assert(plaintext
                                    .compare(deciphered) === 0);
                                next(err, id);
                            }),
                    (id, next) =>
                        kmipClient.destroyBucketKey(id, logger, next),
                ], done);
            });
        });
    });
    it('should succeed healthcheck with working KMIP client and server', done => {
@@ -84,7 +84,7 @@ describe('KMIP High Level Driver', () => {
        },
    };
    const kmipClient = new KMIPClient(options, TTLVCodec,
        LoopbackServerTransport);
    kmipClient.healthcheck(logger, err => {
        assert.ifError(err);
        done();


@@ -36,17 +36,17 @@ describe('KMIP Low Level Driver', () => {
        const kmip = new KMIP(TTLVCodec, MirrorTransport, options);
        const requestPayload = fixture.payload(kmip);
        kmip.request(logger, fixture.operation,
            requestPayload, (err, response) => {
                if (err) {
                    return done(err);
                }
                const responsePayload = response.lookup(
-                    'Response Message/Batch Item/Response Payload',
+                    'Response Message/Batch Item/Response Payload'
                )[0];
                assert.deepStrictEqual(responsePayload,
                    requestPayload);
                return done();
            });
        });
    });
});


@@ -14,68 +14,68 @@ describe('KMIP Transport Template Class', () => {
    requestNumbers.forEach(iterations => {
        it(`should survive ${iterations} iterations` +
            ` with ${pipelineDepth}way pipeline`,
        done => {
            const transport = new TransportTemplate(
                new EchoChannel,
                {
                    pipelineDepth,
                    tls: {
                        port: 5696,
                    },
                });
            const request = Buffer.alloc(10).fill(6);
            async.times(iterations, (n, next) => {
                transport.send(logger, request,
                    (err, conversation, response) => {
                        if (err) {
                            return next(err);
                        }
                        if (request.compare(response) !== 0) {
                            return next(Error('arg'));
                        }
                        return next();
                    });
            }, err => {
                transport.end();
                done(err);
            });
        });

        [true, false].forEach(doEmit => {
            it('should report errors to outstanding requests.' +
                ` w:${pipelineDepth}, i:${iterations}, e:${doEmit}`,
            done => {
                const echoChannel = new EchoChannel;
                echoChannel.clog();
                const transport = new TransportTemplate(
                    echoChannel,
                    {
                        pipelineDepth,
                        tls: {
                            port: 5696,
                        },
                    });
                const request = Buffer.alloc(10).fill(6);
                /* Using a for loop here instead of anything
                 * asynchronous, the callbacks get stuck in
                 * the conversation queue and are unwind with
                 * an error. It is the purpose of this test */
                for (let i = 0; i < iterations; ++i) {
                    transport.send(
                        logger, request,
                        (err, conversation, response) => {
                            assert(err);
                            assert(!response);
                        });
                }
                if (doEmit) {
                    echoChannel.emit('error', new Error('awesome'));
                } else {
                    transport.abortPipeline(echoChannel);
                }
                transport.end();
                done();
            });
        });
    });
});


@@ -1,463 +0,0 @@
const async = require('async');
const assert = require('assert');
const sinon = require('sinon');
const werelogs = require('werelogs');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
const { errors, versioning } = require('../../../../index');
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
const BucketInfo = require('../../../../lib/models/BucketInfo');
const MetadataWrapper =
require('../../../../lib/storage/metadata/MetadataWrapper');
const genVID = require('../../../../lib/versioning/VersionID').generateVersionId;
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
const IMPL_NAME = 'mongodb';
const DB_NAME = 'metadata';
const BUCKET_NAME = 'test-bucket';
const replicationGroupId = 'RG001';
const mongoserver = new MongoMemoryReplSet({
debug: false,
instanceOpts: [
{ port: 27018 },
],
replSet: {
name: 'rs0',
count: 1,
DB_NAME,
storageEngine: 'ephemeralForTest',
},
});
let uidCounter = 0;
function generateVersionId() {
return genVID(`${process.pid}.${uidCounter++}`,
replicationGroupId);
}
const variations = [
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0 },
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1 },
];
describe('MongoClientInterface::metadata.deleteObjectMD', () => {
let metadata;
let collection;
function getObjectCount(cb) {
collection.countDocuments((err, count) => {
if (err) {
cb(err);
}
cb(null, count);
});
}
function getObject(key, cb) {
collection.findOne({
_id: key,
}, {}, (err, doc) => {
if (err) {
return cb(err);
}
if (!doc) {
return cb(errors.NoSuchKey);
}
return cb(null, doc.value);
});
}
beforeAll(done => {
mongoserver.waitUntilRunning().then(() => {
const opts = {
mongodb: {
replicaSetHosts: 'localhost:27018',
writeConcern: 'majority',
replicaSet: 'rs0',
readPreference: 'primary',
database: DB_NAME,
},
};
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
metadata.setup(done);
});
});
afterAll(done => {
async.series([
next => metadata.close(next),
next => mongoserver.stop()
.then(() => next())
.catch(next),
], done);
});
variations.forEach(variation => {
const itOnlyInV1 = variation.vFormat === 'v1' ? it : it.skip;
describe(`vFormat : ${variation.vFormat}`, () => {
beforeEach(done => {
const bucketMD = BucketInfo.fromObj({
_name: BUCKET_NAME,
_owner: 'testowner',
_ownerDisplayName: 'testdisplayname',
_creationDate: new Date().toJSON(),
_acl: {
Canned: 'private',
FULL_CONTROL: [],
WRITE: [],
WRITE_ACP: [],
READ: [],
READ_ACP: [],
},
_mdBucketModelVersion: 10,
_transient: false,
_deleted: false,
_serverSideEncryption: null,
_versioningConfiguration: null,
_locationConstraint: 'us-east-1',
_readLocationConstraint: null,
_cors: null,
_replicationConfiguration: null,
_lifecycleConfiguration: null,
_uid: '',
_isNFS: null,
ingestion: null,
});
async.series([
next => {
metadata.client.defaultBucketKeyFormat = variation.vFormat;
return next();
},
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
if (err) {
return next(err);
}
collection = metadata.client.getCollection(BUCKET_NAME);
return next();
}),
], done);
});
afterEach(done => {
metadata.deleteBucket(BUCKET_NAME, logger, done);
});
it(`Should delete non versioned object ${variation.vFormat}`, done => {
const params = {
objName: 'non-deleted-object',
objVal: {
key: 'non-deleted-object',
versionId: 'null',
},
};
const versionParams = {
versioning: false,
versionId: null,
repairMaster: null,
};
return async.series([
next => {
// we put the master version of object
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, next);
},
next => {
// we put the master version of a second object
params.objName = 'object-to-deleted';
params.objVal.key = 'object-to-deleted';
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, next);
},
next => {
// We delete the first object
metadata.deleteObjectMD(BUCKET_NAME, params.objName, null, logger, next);
},
next => {
// Object must be removed
metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return next();
});
},
next => {
// only 1 object remaining in db
getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 1);
return next();
});
},
], done);
});
it(`Should not throw error when object non existent ${variation.vFormat}`, done => {
const objName = 'non-existent-object';
metadata.deleteObjectMD(BUCKET_NAME, objName, null, logger, err => {
assert.deepStrictEqual(err, null);
return done();
});
});
it(`Should not throw error when bucket non existent ${variation.vFormat}`, done => {
const objName = 'non-existent-object';
metadata.deleteObjectMD(BUCKET_NAME, objName, null, logger, err => {
assert.deepStrictEqual(err, null);
return done();
});
});
it(`Master should not be updated when non lastest version is deleted ${variation.vFormat}`, done => {
let versionId1 = null;
const params = {
objName: 'test-object',
objVal: {
key: 'test-object',
versionId: 'null',
},
vFormat: 'v0',
};
const versionParams = {
versioning: true,
versionId: null,
repairMaster: null,
};
return async.series([
next => {
// we start by creating a new version and master
versionId1 = generateVersionId(this.replicationGroupId);
params.versionId = versionId1;
params.objVal.versionId = versionId1;
versionParams.versionId = versionId1;
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, next);
},
next => {
// we create a second version of the same object (master is updated)
params.objVal.versionId = 'version2';
versionParams.versionId = null;
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, next);
},
next => {
// we delete the first version
metadata.deleteObjectMD(BUCKET_NAME, params.objName, { versionId: versionId1 },
logger, next);
},
next => {
// the first version should no longer be available
metadata.getObjectMD(BUCKET_NAME, params.objName, { versionId: versionId1 }, logger, err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return next();
});
},
next => {
// master must be containing second version metadata
metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.notStrictEqual(data.versionId, versionId1);
return next();
});
},
next => {
// master and one version remaining in db
getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 2);
return next();
});
},
], done);
});
it(`Master should be updated when last version is deleted ${variation.vFormat}`, done => {
let versionId1;
let versionId2;
const params = {
objName: 'test-object',
objVal: {
key: 'test-object',
versionId: 'null',
isLast: false,
},
};
const versionParams = {
versioning: true,
versionId: null,
repairMaster: null,
};
return async.series([
next => {
// we start by creating a new version and master
versionId1 = generateVersionId(this.replicationGroupId);
params.versionId = versionId1;
params.objVal.versionId = versionId1;
versionParams.versionId = versionId1;
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, next);
},
next => {
// we create a second version of the same object (master is updated)
// params.objVal.versionId = 'version2';
// versionParams.versionId = null;
versionId2 = generateVersionId(this.replicationGroupId);
params.versionId = versionId2;
params.objVal.versionId = versionId2;
versionParams.versionId = versionId2;
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, next);
},
next => {
// deleting latest version
metadata.deleteObjectMD(BUCKET_NAME, params.objName, { versionId: versionId2 },
logger, next);
},
next => {
// latest version must be removed
metadata.getObjectMD(BUCKET_NAME, params.objName, { versionId: versionId2 }, logger, err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return next();
});
},
next => {
// master must be updated to contain first version data
metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.versionId, versionId1);
return next();
});
},
next => {
// one master and version in the db
getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 2);
return next();
});
},
], done);
});
it(`Should fail when version id non existent ${variation.vFormat}`, done => {
const versionId = generateVersionId(this.replicationGroupId);
const objName = 'test-object';
metadata.deleteObjectMD(BUCKET_NAME, objName, { versionId }, logger, err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return done();
});
});
itOnlyInV1(`Should create master when delete marker removed ${variation.vFormat}`, done => {
const objVal = {
key: 'test-object',
isDeleteMarker: false,
};
const params = {
versioning: true,
versionId: null,
repairMaster: null,
};
let firstVersionVersionId;
let deleteMarkerVersionId;
async.series([
// We first create a new version and master
next => metadata.putObjectMD(BUCKET_NAME, 'test-object', objVal, params, logger, (err, res) => {
if (err) {
return next(err);
}
firstVersionVersionId = JSON.parse(res).versionId;
return next();
}),
// putting a delete marker as last version
next => {
objVal.isDeleteMarker = true;
params.versionId = null;
return metadata.putObjectMD(BUCKET_NAME, 'test-object', objVal, params, logger, (err, res) => {
if (err) {
return next(err);
}
deleteMarkerVersionId = JSON.parse(res).versionId;
return next();
});
},
next => {
// using fake clock to override the setTimeout used by the repair
const clock = sinon.useFakeTimers();
return metadata.deleteObjectMD(BUCKET_NAME, 'test-object', { versionId: deleteMarkerVersionId },
logger, () => {
// running the repair callback
clock.runAll();
clock.restore();
return next();
});
},
// waiting for the repair callback to finish
next => setTimeout(next, 100),
// master should be created
next => {
getObject('\x7fMtest-object', (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, 'test-object');
assert.strictEqual(object.versionId, firstVersionVersionId);
assert.strictEqual(object.isDeleteMarker, false);
return next();
});
},
], done);
});
itOnlyInV1(`Should delete master when delete marker becomes last version ${variation.vFormat}`, done => {
const objVal = {
key: 'test-object',
isDeleteMarker: false,
};
const params = {
versioning: true,
versionId: null,
repairMaster: null,
};
let versionId;
async.series([
// We first create a new version and master
next => metadata.putObjectMD(BUCKET_NAME, 'test-object', objVal, params, logger, next),
// putting a delete marker as last version
next => {
objVal.isDeleteMarker = true;
params.versionId = null;
return metadata.putObjectMD(BUCKET_NAME, 'test-object', objVal, params, logger, next);
},
// putting new version on top of delete marker
next => {
objVal.isDeleteMarker = false;
return metadata.putObjectMD(BUCKET_NAME, 'test-object', objVal, params, logger, (err, res) => {
if (err) {
return next(err);
}
versionId = JSON.parse(res).versionId;
return next();
});
},
next => {
// using fake clock to override the setTimeout used by the repair
const clock = sinon.useFakeTimers();
return metadata.deleteObjectMD(BUCKET_NAME, 'test-object', { versionId },
logger, () => {
// running the repair callback
clock.runAll();
clock.restore();
return next();
});
},
// waiting for the repair callback to finish
next => setTimeout(next, 100),
// master must be deleted
next => {
getObject('\x7fMtest-object', err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return next();
});
},
], done);
});
});
});
});

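A detail worth noting in the deleted suite above: the 15-second repair timer is driven with sinon fake timers so the tests never actually wait. The trick in isolation:

```javascript
const sinon = require('sinon');

// Make a pending setTimeout (like the 15s PHD repair) fire immediately.
const clock = sinon.useFakeTimers();
const timer = setTimeout(() => console.log('repair ran'), 15000);
clock.runAll();   // executes the queued callback synchronously
clock.restore();  // hand the real clock back before anything else runs
```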

@@ -1,283 +0,0 @@
const async = require('async');
const assert = require('assert');
const werelogs = require('werelogs');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
const { errors, versioning } = require('../../../../index');
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
const BucketInfo = require('../../../../lib/models/BucketInfo');
const MetadataWrapper =
require('../../../../lib/storage/metadata/MetadataWrapper');
const genVID = versioning.VersionID.generateVersionId;
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
const { formatMasterKey } = require('../../../../lib/storage/metadata/mongoclient/utils');
const IMPL_NAME = 'mongodb';
const DB_NAME = 'metadata';
const BUCKET_NAME = 'test-bucket';
const replicationGroupId = 'RG001';
const mongoserver = new MongoMemoryReplSet({
debug: false,
instanceOpts: [
{ port: 27019 },
],
replSet: {
name: 'rs0',
count: 1,
DB_NAME,
storageEngine: 'ephemeralForTest',
},
});
let uidCounter = 0;
function generateVersionId() {
return genVID(`${process.pid}.${uidCounter++}`,
replicationGroupId);
}
const variations = [
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0 },
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1 },
];
describe('MongoClientInterface::metadata.getObjectMD', () => {
let metadata;
let collection;
let versionId1;
let versionId2;
let params = {
objName: 'pfx1-test-object',
objVal: {
key: 'pfx1-test-object',
versionId: 'null',
},
};
function updateMasterObject(objName, versionId, objVal, vFormat, cb) {
const mKey = formatMasterKey(objName, vFormat);
collection.updateOne(
{
_id: mKey,
$or: [{
'value.versionId': {
$exists: false,
},
},
{
'value.versionId': {
$gt: versionId,
},
},
],
},
{
$set: { _id: mKey, value: objVal },
},
{ upsert: true },
err => {
if (err) {
return cb(err);
}
return cb(null);
});
}
beforeAll(done => {
mongoserver.waitUntilRunning().then(() => {
const opts = {
mongodb: {
replicaSetHosts: 'localhost:27019',
writeConcern: 'majority',
replicaSet: 'rs0',
readPreference: 'primary',
database: DB_NAME,
},
};
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
metadata.setup(done);
});
});
afterAll(done => {
async.series([
next => metadata.close(next),
next => mongoserver.stop()
.then(() => next())
.catch(next),
], done);
});
variations.forEach(variation => {
const itOnlyInV1 = variation.vFormat === 'v1' ? it : it.skip;
describe(`vFormat : ${variation.vFormat}`, () => {
beforeEach(done => {
const bucketMD = BucketInfo.fromObj({
_name: BUCKET_NAME,
_owner: 'testowner',
_ownerDisplayName: 'testdisplayname',
_creationDate: new Date().toJSON(),
_acl: {
Canned: 'private',
FULL_CONTROL: [],
WRITE: [],
WRITE_ACP: [],
READ: [],
READ_ACP: [],
},
_mdBucketModelVersion: 10,
_transient: false,
_deleted: false,
_serverSideEncryption: null,
_versioningConfiguration: null,
_locationConstraint: 'us-east-1',
_readLocationConstraint: null,
_cors: null,
_replicationConfiguration: null,
_lifecycleConfiguration: null,
_uid: '',
_isNFS: null,
ingestion: null,
});
const versionParams = {
versioning: true,
versionId: null,
repairMaster: null,
};
async.series([
next => {
metadata.client.defaultBucketKeyFormat = variation.vFormat;
return next();
},
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
if (err) {
return next(err);
}
collection = metadata.client.getCollection(BUCKET_NAME);
return next();
}),
next => {
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, (err, res) => {
if (err) {
return next(err);
}
versionId1 = JSON.parse(res).versionId;
return next(null);
});
},
next => {
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, (err, res) => {
if (err) {
return next(err);
}
versionId2 = JSON.parse(res).versionId;
return next(null);
});
},
], done);
});
afterEach(done => {
// reset params
params = {
objName: 'pfx1-test-object',
objVal: {
key: 'pfx1-test-object',
versionId: 'null',
},
};
metadata.deleteBucket(BUCKET_NAME, logger, done);
});
it(`Should return latest version of object ${variation.it}`, done =>
metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, params.objName);
assert.strictEqual(object.versionId, versionId2);
return done();
}));
it(`Should return the specified version of object ${variation.it}`, done =>
metadata.getObjectMD(BUCKET_NAME, params.objName, { versionId: versionId1 }, logger, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, params.objName);
assert.strictEqual(object.versionId, versionId1);
return done();
}));
it(`Should throw error when version non existent ${variation.it}`, done => {
const versionId = '1234567890';
return metadata.getObjectMD(BUCKET_NAME, params.objName, { versionId }, logger, (err, object) => {
assert.deepStrictEqual(object, undefined);
assert.deepStrictEqual(err, errors.NoSuchKey);
return done();
});
});
it(`Should throw error when object non existent ${variation.it}`, done => {
const objName = 'non-existent-object';
return metadata.getObjectMD(BUCKET_NAME, objName, null, logger, err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return done();
});
});
it(`Should throw error when object non existent ${variation.it}`, done => {
const bucketName = 'non-existent-bucket';
return metadata.getObjectMD(bucketName, params.objName, null, logger, (err, object) => {
assert.deepStrictEqual(object, undefined);
assert.deepStrictEqual(err, errors.NoSuchKey);
return done();
});
});
it(`Should return latest version when master is PHD ${variation.it}`, done => {
async.series([
next => {
const objectName = variation.vFormat === 'v0' ? 'pfx1-test-object' : '\x7fMpfx1-test-object';
// adding isPHD flag to master
const phdVersionId = generateVersionId();
params.objVal.versionId = phdVersionId;
params.objVal.isPHD = true;
updateMasterObject(objectName, phdVersionId, params.objVal,
variation.vFormat, next);
},
// Should return latest object version
next => metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, params.objName);
assert.strictEqual(object.versionId, versionId2);
delete params.isPHD;
return next();
}),
], done);
});
itOnlyInV1(`Should return last version when master deleted ${variation.vFormat}`, done => {
const versioningParams = {
versioning: true,
versionId: null,
repairMaster: null,
};
async.series([
// putting a delete marker as last version
next => {
params.versionId = null;
params.objVal.isDeleteMarker = true;
return metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal, versioningParams,
logger, next);
},
next => metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, params.objName);
assert.strictEqual(object.isDeleteMarker, true);
params.objVal.isDeleteMarker = null;
return next();
}),
], done);
});
});
});
});

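The "master is PHD" test above pins down the read-path contract: a placeholder master is never served, the reader falls back to the newest real version. A sketch of that fallback with hypothetical helpers:

```javascript
// Hypothetical read-path sketch of the PHD fallback asserted above;
// getMaster and listLatestVersion stand in for the metadata calls.
function getMasterOrLatest(getMaster, listLatestVersion, callback) {
    getMaster((err, master) => {
        if (err) {
            return callback(err);
        }
        if (master.isPHD) {
            // placeholder master: resolve the newest version instead
            return listLatestVersion(callback);
        }
        return callback(null, master);
    });
}
```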

@@ -1,412 +0,0 @@
const async = require('async');
const assert = require('assert');
const werelogs = require('werelogs');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
const BucketInfo = require('../../../../lib/models/BucketInfo');
const MetadataWrapper =
require('../../../../lib/storage/metadata/MetadataWrapper');
const { versioning } = require('../../../../index');
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
const IMPL_NAME = 'mongodb';
const DB_NAME = 'metadata';
const BUCKET_NAME = 'test-bucket';
const mongoserver = new MongoMemoryReplSet({
debug: false,
instanceOpts: [
{ port: 27020 },
],
replSet: {
name: 'rs0',
count: 1,
dbName: DB_NAME,
storageEngine: 'ephemeralForTest',
},
});
const variations = [
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0 },
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1 },
];
describe('MongoClientInterface::metadata.listObject', () => {
let metadata;
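// Helper: writes the same object metadata versionNb times so each test
// starts from a bucket with a known number of versions per key.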
function putBulkObjectVersions(bucketName, objName, objVal, params, versionNb, cb) {
let count = 0;
async.whilst(
() => count < versionNb,
cbIterator => {
count++;
// eslint-disable-next-line
return metadata.putObjectMD(bucketName, objName, objVal, params,
logger, cbIterator);
},
err => {
if (err) {
return cb(err);
}
return cb(null);
},
);
}
beforeAll(done => {
mongoserver.waitUntilRunning().then(() => {
const opts = {
mongodb: {
replicaSetHosts: 'localhost:27020',
writeConcern: 'majority',
replicaSet: 'rs0',
readPreference: 'primary',
database: DB_NAME,
},
};
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
metadata.setup(done);
});
});
afterAll(done => {
async.series([
next => metadata.close(next),
next => mongoserver.stop()
.then(() => next())
.catch(next),
], done);
});
variations.forEach(variation => {
beforeEach(done => {
const bucketMD = BucketInfo.fromObj({
_name: BUCKET_NAME,
_owner: 'testowner',
_ownerDisplayName: 'testdisplayname',
_creationDate: new Date().toJSON(),
_acl: {
Canned: 'private',
FULL_CONTROL: [],
WRITE: [],
WRITE_ACP: [],
READ: [],
READ_ACP: [],
},
_mdBucketModelVersion: 10,
_transient: false,
_deleted: false,
_serverSideEncryption: null,
_versioningConfiguration: null,
_locationConstraint: 'us-east-1',
_readLocationConstraint: null,
_cors: null,
_replicationConfiguration: null,
_lifecycleConfiguration: null,
_uid: '',
_isNFS: null,
ingestion: null,
});
const versionParams = {
versioning: true,
versionId: null,
repairMaster: null,
};
async.series([
next => {
metadata.client.defaultBucketKeyFormat = variation.vFormat;
return next();
},
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
if (err) {
return next(err);
}
return next();
}),
next => {
const params = {
objName: 'pfx1-test-object',
objVal: {
key: 'pfx1-test-object',
versionId: 'null',
},
nbVersions: 5,
};
putBulkObjectVersions(BUCKET_NAME, params.objName, params.objVal, versionParams,
params.nbVersions, next);
},
next => {
const params = {
objName: 'pfx2-test-object',
objVal: {
key: 'pfx2-test-object',
versionId: 'null',
},
nbVersions: 5,
};
putBulkObjectVersions(BUCKET_NAME, params.objName, params.objVal, versionParams,
params.nbVersions, next);
},
next => {
const params = {
objName: 'pfx3-test-object',
objVal: {
key: 'pfx3-test-object',
versionId: 'null',
},
nbVersions: 5,
};
putBulkObjectVersions(BUCKET_NAME, params.objName, params.objVal, versionParams,
params.nbVersions, next);
},
], done);
});
afterEach(done => {
metadata.deleteBucket(BUCKET_NAME, logger, done);
});
it(`Should list master versions of objects ${variation.it}`, done => {
const params = {
listingType: 'DelimiterMaster',
maxKeys: 100,
};
return metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.Contents.length, 3);
assert.strictEqual(data.Contents[0].key, 'pfx1-test-object');
assert.strictEqual(data.Contents[1].key, 'pfx2-test-object');
assert.strictEqual(data.Contents[2].key, 'pfx3-test-object');
return done();
});
});
it(`Should truncate list of master versions of objects ${variation.it}`, done => {
const params = {
listingType: 'DelimiterMaster',
maxKeys: 2,
};
return metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.Contents.length, 2);
assert.strictEqual(data.Contents[0].key, 'pfx1-test-object');
assert.strictEqual(data.Contents[1].key, 'pfx2-test-object');
return done();
});
});
it(`Should list master versions of objects that start with prefix ${variation.it}`, done => {
const bucketName = BUCKET_NAME;
const params = {
listingType: 'DelimiterMaster',
maxKeys: 100,
prefix: 'pfx2',
};
return metadata.listObject(bucketName, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.Contents.length, 1);
assert.strictEqual(data.Contents[0].key, 'pfx2-test-object');
return done();
});
});
it(`Should return empty results when bucket is non-existent (master) ${variation.it}`, done => {
const bucketName = 'non-existent-bucket';
const params = {
listingType: 'DelimiterMaster',
maxKeys: 100,
};
return metadata.listObject(bucketName, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert(data);
assert.strictEqual(data.Contents.length, 0);
return done();
});
});
it(`Should list all versions of objects ${variation.it}`, done => {
const bucketName = BUCKET_NAME;
const params = {
listingType: 'DelimiterVersions',
maxKeys: 1000,
};
const versionsPerKey = {};
return metadata.listObject(bucketName, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.Versions.length, 15);
data.Versions.forEach(version => {
versionsPerKey[version.key] = (versionsPerKey[version.key] || 0) + 1;
});
assert.strictEqual(versionsPerKey['pfx1-test-object'], 5);
assert.strictEqual(versionsPerKey['pfx2-test-object'], 5);
assert.strictEqual(versionsPerKey['pfx3-test-object'], 5);
return done();
});
});
it(`Should truncate list of versions of objects ${variation.it}`, done => {
const params = {
listingType: 'DelimiterVersions',
maxKeys: 5,
};
const versionsPerKey = {};
return metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.Versions.length, 5);
data.Versions.forEach(version => {
versionsPerKey[version.key] = (versionsPerKey[version.key] || 0) + 1;
});
assert.strictEqual(versionsPerKey['pfx1-test-object'], 5);
return done();
});
});
it(`Should list versions of objects that start with prefix ${variation.it}`, done => {
const params = {
listingType: 'DelimiterVersions',
maxKeys: 100,
prefix: 'pfx2',
};
const versionsPerKey = {};
return metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.Versions.length, 5);
data.Versions.forEach(version => {
versionsPerKey[version.key] = (versionsPerKey[version.key] || 0) + 1;
});
assert.strictEqual(versionsPerKey['pfx2-test-object'], 5);
return done();
});
});
it(`Should return empty results when bucket is non-existent (version) ${variation.it}`, done => {
const bucketName = 'non-existent-bucket';
const params = {
listingType: 'DelimiterVersions',
maxKeys: 100,
};
return metadata.listObject(bucketName, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert(data);
assert.strictEqual(data.Versions.length, 0);
return done();
});
});
it(`should check entire list with pagination (version) ${variation.it}`, done => {
const versionsPerKey = {};
const bucketName = BUCKET_NAME;
const get = (maxKeys, keyMarker, versionIdMarker, cb) => metadata.listObject(bucketName, {
listingType: 'DelimiterVersions',
maxKeys,
keyMarker,
versionIdMarker,
}, logger, (err, res) => {
if (err) {
return cb(err);
}
res.Versions.forEach(version => {
versionsPerKey[version.key] = (versionsPerKey[version.key] || 0) + 1;
});
if (res.IsTruncated) {
return get(maxKeys, res.NextKeyMarker, res.NextVersionIdMarker, cb);
}
return cb(null);
});
return get(3, null, null, err => {
assert.deepStrictEqual(err, null);
assert.strictEqual(Object.keys(versionsPerKey).length, 3);
assert.strictEqual(versionsPerKey['pfx1-test-object'], 5);
assert.strictEqual(versionsPerKey['pfx2-test-object'], 5);
assert.strictEqual(versionsPerKey['pfx3-test-object'], 5);
done();
});
});
it(`should not list phd master key when listing masters ${variation.it}`, done => {
const objVal = {
key: 'pfx1-test-object',
versionId: 'null',
};
const versionParams = {
versioning: true,
};
const params = {
listingType: 'DelimiterMaster',
prefix: 'pfx1',
};
let versionId;
let lastVersionId;
async.series([
next => metadata.putObjectMD(BUCKET_NAME, 'pfx1-test-object', objVal, versionParams,
logger, (err, res) => {
if (err) {
return next(err);
}
versionId = JSON.parse(res).versionId;
return next(null);
}),
next => metadata.putObjectMD(BUCKET_NAME, 'pfx1-test-object', objVal, versionParams,
logger, (err, res) => {
if (err) {
return next(err);
}
lastVersionId = JSON.parse(res).versionId;
return next(null);
}),
// when deleting the last version of an object a PHD master is created
// and kept for 15s before it's repaired
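// during that window the master key still exists but carries
// { isPHD: true }, so listings must resolve it to the newest real version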
next => metadata.deleteObjectMD(BUCKET_NAME, 'pfx1-test-object', { versionId: lastVersionId },
logger, next),
next => metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.Contents[0].value.VersionId, versionId);
return next();
}),
], done);
});
it(`should not list phd master key when listing versions ${variation.it}`, done => {
const objVal = {
key: 'pfx1-test-object',
versionId: 'null',
};
const versionParams = {
versioning: true,
};
const params = {
listingType: 'DelimiterVersions',
prefix: 'pfx1',
};
let lastVersionId;
let versionIds;
async.series([
next => metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.Versions.length, 5);
versionIds = data.Versions.map(version => version.VersionId);
return next();
}),
next => metadata.putObjectMD(BUCKET_NAME, 'pfx1-test-object', objVal, versionParams,
logger, (err, res) => {
if (err) {
return next(err);
}
lastVersionId = JSON.parse(res).versionId;
return next(null);
}),
// when deleting the last version of an object a PHD master is created
// and kept for 15s before it's repaired
next => metadata.deleteObjectMD(BUCKET_NAME, 'pfx1-test-object', { versionId: lastVersionId },
logger, next),
next => metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
const newVersionIds = data.Versions.map(version => version.VersionId);
assert.strictEqual(data.Versions.length, 5);
assert(versionIds.every(version => newVersionIds.includes(version)));
return next();
}),
], done);
});
});
});

View File

@@ -1,429 +0,0 @@
const async = require('async');
const assert = require('assert');
const werelogs = require('werelogs');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
const { errors, versioning } = require('../../../../index');
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
const BucketInfo = require('../../../../lib/models/BucketInfo');
const MetadataWrapper =
require('../../../../lib/storage/metadata/MetadataWrapper');
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
const IMPL_NAME = 'mongodb';
const DB_NAME = 'metadata';
const BUCKET_NAME = 'test-bucket';
const OBJECT_NAME = 'test-object';
const VERSION_ID = '98451712418844999999RG001 22019.0';
const mongoserver = new MongoMemoryReplSet({
debug: false,
instanceOpts: [
{ port: 27021 },
],
replSet: {
name: 'rs0',
count: 1,
dbName: DB_NAME,
storageEngine: 'ephemeralForTest',
},
});
const variations = [
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0 },
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1 },
];
describe('MongoClientInterface:metadata.putObjectMD', () => {
let metadata;
let collection;
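// Reads the raw metadata document for a key straight from the MongoDB
// collection, so tests can assert on exactly what was stored.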
function getObject(key, cb) {
collection.findOne({
_id: key,
}, {}, (err, doc) => {
if (err) {
return cb(err);
}
if (!doc) {
return cb(errors.NoSuchKey);
}
return cb(null, doc.value);
});
}
function getObjectCount(cb) {
collection.countDocuments((err, count) => {
if (err) {
return cb(err);
}
return cb(null, count);
});
}
beforeAll(done => {
mongoserver.waitUntilRunning().then(() => {
const opts = {
mongodb: {
replicaSetHosts: 'localhost:27021',
writeConcern: 'majority',
replicaSet: 'rs0',
readPreference: 'primary',
database: DB_NAME,
},
};
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
metadata.setup(done);
});
});
afterAll(done => {
async.series([
next => metadata.close(next),
next => mongoserver.stop()
.then(() => next())
.catch(next),
], done);
});
variations.forEach(variation => {
const itOnlyInV1 = variation.vFormat === 'v1' ? it : it.skip;
describe(`vFormat : ${variation.vFormat}`, () => {
beforeEach(done => {
const bucketMD = BucketInfo.fromObj({
_name: BUCKET_NAME,
_owner: 'testowner',
_ownerDisplayName: 'testdisplayname',
_creationDate: new Date().toJSON(),
_acl: {
Canned: 'private',
FULL_CONTROL: [],
WRITE: [],
WRITE_ACP: [],
READ: [],
READ_ACP: [],
},
_mdBucketModelVersion: 10,
_transient: false,
_deleted: false,
_serverSideEncryption: null,
_versioningConfiguration: null,
_locationConstraint: 'us-east-1',
_readLocationConstraint: null,
_cors: null,
_replicationConfiguration: null,
_lifecycleConfiguration: null,
_uid: '',
_isNFS: null,
ingestion: null,
});
async.series([
next => {
metadata.client.defaultBucketKeyFormat = variation.vFormat;
return next();
},
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
if (err) {
return next(err);
}
collection = metadata.client.getCollection(BUCKET_NAME);
return next();
}),
], done);
});
afterEach(done => {
metadata.deleteBucket(BUCKET_NAME, logger, done);
});
it(`Should put a new non-versioned object ${variation.it}`, done => {
const objVal = {
key: OBJECT_NAME,
versionId: 'null',
updated: false,
};
const params = {
versioning: null,
versionId: null,
repairMaster: null,
};
async.series([
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
next => {
const key = variation.vFormat === 'v0' ? 'test-object' : '\x7fMtest-object';
getObject(key, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, OBJECT_NAME);
return next();
});
},
// When versioning is not active, only one document is created (master)
next => getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 1);
return next();
}),
], done);
});
it(`Should update the metadata ${variation.it}`, done => {
const objVal = {
key: OBJECT_NAME,
versionId: 'null',
updated: false,
};
const params = {
versioning: null,
versionId: null,
repairMaster: null,
};
async.series([
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
next => {
objVal.updated = true;
metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next);
},
// object metadata must be updated
next => {
const key = variation.vFormat === 'v0' ? 'test-object' : '\x7fMtest-object';
getObject(key, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, OBJECT_NAME);
assert.strictEqual(object.updated, true);
return next();
});
},
// Only a master version should be created
next => getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 1);
return next();
}),
], done);
});
it(`Should put versioned object with the specified versionId ${variation.it}`, done => {
const objVal = {
key: OBJECT_NAME,
versionId: VERSION_ID,
updated: false,
};
const params = {
versioning: true,
versionId: VERSION_ID,
repairMaster: null,
};
async.series([
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
// checking if metadata corresponds to what was given to the function
next => {
const key = variation.vFormat === 'v0' ? 'test-object' : '\x7fMtest-object';
getObject(key, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, OBJECT_NAME);
assert.strictEqual(object.versionId, VERSION_ID);
return next();
});
},
// We'll have one master and one version
next => getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 2);
return next();
}),
], done);
});
it(`Should put new version and update master ${variation.it}`, done => {
const objVal = {
key: OBJECT_NAME,
versionId: VERSION_ID,
updated: false,
};
const params = {
versioning: true,
versionId: null,
repairMaster: null,
};
let versionId = null;
async.series([
// We first create a master and a version
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
versionId = JSON.parse(data).versionId;
return next();
}),
// We put another version of the object
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
// Master must be updated
next => {
const key = variation.vFormat === 'v0' ? 'test-object' : '\x7fMtest-object';
getObject(key, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, OBJECT_NAME);
assert.notStrictEqual(object.versionId, versionId);
return next();
});
},
// we'll have two versions and one master
next => getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 3);
return next();
}),
], done);
});
it(`Should update master when versioning is disabled ${variation.it}`, done => {
const objVal = {
key: OBJECT_NAME,
versionId: VERSION_ID,
updated: false,
};
const params = {
versioning: true,
versionId: null,
repairMaster: null,
};
let versionId = null;
async.series([
// We first create a new version and master
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
versionId = JSON.parse(data).versionId;
return next();
}),
next => {
// Disabling versioning and putting new version
params.versioning = false;
params.versionId = '';
return metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next);
},
// Master must be updated
next => {
const key = variation.vFormat === 'v0' ? 'test-object' : '\x7fMtest-object';
getObject(key, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, OBJECT_NAME);
assert.notStrictEqual(object.versionId, versionId);
return next();
});
},
// The second put shouldn't create a new version
next => getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 2);
return next();
}),
], done);
});
it(`Should update latest version and repair master ${variation.it}`, done => {
const objVal = {
key: OBJECT_NAME,
versionId: VERSION_ID,
updated: false,
};
const params = {
versioning: true,
versionId: VERSION_ID,
repairMaster: null,
};
async.series([
// We first create a new version and master
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
next => {
// Updating the version and repairing master
params.repairMaster = true;
objVal.updated = true;
return metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next);
},
// Master must be updated
next => {
const key = variation.vFormat === 'v0' ? 'test-object' : '\x7fMtest-object';
getObject(key, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, OBJECT_NAME);
assert.strictEqual(object.versionId, VERSION_ID);
assert.strictEqual(object.updated, true);
return next();
});
},
// The second put shouldn't create a new version
next => getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 2);
return next();
}),
], done);
});
itOnlyInV1(`Should delete master when last version is delete marker ${variation.it}`, done => {
const objVal = {
key: OBJECT_NAME,
versionId: VERSION_ID,
updated: false,
isDeleteMarker: false,
};
const params = {
versioning: true,
versionId: VERSION_ID,
repairMaster: null,
};
async.series([
// We first create a new version and master
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
// putting a delete marker as last version
next => {
objVal.isDeleteMarker = true;
params.versionId = null;
return metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next);
},
// master must be deleted
next => getObject('\x7fMtest-object', err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return next();
}),
], done);
});
itOnlyInV1(`Should create master when new version is put on top of delete marker ${variation.it}`, done => {
const objVal = {
key: OBJECT_NAME,
versionId: VERSION_ID,
updated: false,
isDeleteMarker: false,
};
const params = {
versioning: true,
versionId: VERSION_ID,
repairMaster: null,
};
async.series([
// We first create a new version and master
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
// putting a delete marker as last version
next => {
objVal.isDeleteMarker = true;
params.versionId = null;
return metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next);
},
// We put a new version on top of delete marker
next => {
objVal.isDeleteMarker = false;
objVal.updated = true;
return metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next);
},
// master must be created
next => getObject('\x7fMtest-object', (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, OBJECT_NAME);
assert.strictEqual(object.updated, true);
assert.strictEqual(object.isDeleteMarker, false);
return next();
}),
], done);
});
});
});
});

View File

@@ -1,330 +0,0 @@
const async = require('async');
const assert = require('assert');
const werelogs = require('werelogs');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
const { errors, versioning } = require('../../../../index');
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
const BucketInfo = require('../../../../lib/models/BucketInfo');
const MetadataWrapper =
require('../../../../lib/storage/metadata/MetadataWrapper');
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
const IMP_NAME = 'mongodb';
const DB_NAME = 'metadata';
const BUCKET_NAME = 'testbucket';
const mongoserver = new MongoMemoryReplSet({
debug: false,
instanceOpts: [
{ port: 27022 },
],
replSet: {
name: 'rs0',
count: 1,
dbName: DB_NAME,
storageEngine: 'ephemeralForTest',
},
});
describe('MongoClientInterface:withCond', () => {
let metadata;
const variations = [
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0 },
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1 },
];
beforeAll(done => {
mongoserver.waitUntilRunning().then(() => {
const opts = {
mongodb: {
replicaSetHosts: 'localhost:27022',
writeConcern: 'majority',
replicaSet: 'rs0',
readPreference: 'primary',
database: DB_NAME,
},
};
metadata = new MetadataWrapper(IMP_NAME, opts, null, logger);
metadata.setup(done);
});
});
afterAll(done => {
async.series([
next => metadata.close(next),
next => mongoserver.stop()
.then(() => next())
.catch(next),
], done);
});
variations.forEach(variation => {
describe('::putObjectWithCond', () => {
beforeEach(done => {
const bucketMD = BucketInfo.fromObj({
_name: BUCKET_NAME,
_owner: 'testowner',
_ownerDisplayName: 'testdisplayname',
_creationDate: new Date().toJSON(),
_acl: {
Canned: 'private',
FULL_CONTROL: [],
WRITE: [],
WRITE_ACP: [],
READ: [],
READ_ACP: [],
},
_mdBucketModelVersion: 10,
_transient: false,
_deleted: false,
_serverSideEncryption: null,
_versioningConfiguration: null,
_locationConstraint: 'us-east-1',
_readLocationConstraint: null,
_cors: null,
_replicationConfiguration: null,
_lifecycleConfiguration: null,
_uid: '',
_isNFS: null,
ingestion: null,
});
async.series([
next => {
metadata.client.defaultBucketKeyFormat = variation.vFormat;
return next();
},
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, next),
], done);
});
afterEach(done => {
metadata.deleteBucket(BUCKET_NAME, logger, done);
});
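// Each case below feeds putObjectWithCond a MongoDB-style conditions
// filter (implicit equality or an explicit $eq) that must match the
// stored value for the update to be applied.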
const tests = [
[
`should upsert the object if it does not already exist ${variation.it}`,
{
initVal: null,
upsertVal: { value: { number: 42, string: 'forty-two' } },
conditions: { value: { number: 24 } },
expectedVal: { value: { number: 42, string: 'forty-two' } },
error: null,
},
],
[
`should not update an existing object if the conditions fail ${variation.it}`,
{
initVal: { value: { number: 0, string: 'zero' } },
upsertVal: { value: { number: 42, string: 'forty-two' } },
conditions: { value: { number: 24 } },
expectedVal: { value: { number: 0, string: 'zero' } },
error: errors.InternalError,
},
],
[
`should not update an existing object if the conditions fail ${variation.it}`,
{
initVal: { value: { number: 0, string: 'zero' } },
upsertVal: { value: { number: 42, string: 'forty-two' } },
conditions: { value: { string: { $eq: 'twenty-four' } } },
expectedVal: { value: { number: 0, string: 'zero' } },
error: errors.InternalError,
},
],
[
`should not update an existing object if the conditions fail ${variation.it}`,
{
initVal: { value: { number: 0, string: 'zero' } },
upsertVal: { value: { number: 42, string: 'forty-two' } },
conditions: {
value: {
string: { $eq: 'twenty-four' },
number: { $eq: 0 },
},
},
expectedVal: { value: { number: 0, string: 'zero' } },
error: errors.InternalError,
},
],
[
`should update an existing object if the conditions pass ${variation.it}`,
{
initVal: { value: { number: 24, string: 'twenty-four' } },
upsertVal: { value: { number: 42, string: 'forty-two' } },
conditions: { value: { number: 24 } },
expectedVal: { value: { number: 42, string: 'forty-two' } },
error: null,
},
],
[
`should update an existing object if the conditions pass ${variation.it}`,
{
initVal: { value: { number: 24, string: 'twenty-four' } },
upsertVal: { value: { number: 42, string: 'forty-two' } },
conditions: { value: { string: { $eq: 'twenty-four' } } },
expectedVal: { value: { number: 42, string: 'forty-two' } },
error: null,
},
],
[
`should update an existing object if the conditions pass ${variation.it}`,
{
initVal: { value: { number: 24, string: 'twenty-four' } },
upsertVal: { value: { number: 42, string: 'forty-two' } },
conditions: {
value: {
string: { $eq: 'twenty-four' },
number: { $eq: 24 },
},
},
expectedVal: { value: { number: 42, string: 'forty-two' } },
error: null,
},
],
];
tests.forEach(([msg, testCase]) => it(msg, done => {
const objectKey = 'testkey';
const {
initVal, upsertVal, conditions, expectedVal, error,
} = testCase;
const params = { conditions };
async.series([
next => {
if (!initVal) {
return next();
}
return metadata.putObjectMD(BUCKET_NAME, objectKey, initVal,
{}, logger, next);
},
next => metadata.putObjectWithCond(BUCKET_NAME, objectKey,
upsertVal, params, logger, err => {
if (error) {
assert.deepStrictEqual(err, error);
return next();
}
assert(!err);
return next();
}),
next => metadata.getObjectMD(BUCKET_NAME, objectKey, {}, logger,
(err, res) => {
assert(!err);
assert.deepStrictEqual(res, expectedVal);
next();
}),
], done);
}));
});
describe('::deleteObjectWithCond', () => {
const tests = [
[
`should return no such key if the object does not exist ${variation.it}`,
{
initVal: null,
conditions: { value: { number: 24 } },
expectedVal: null,
error: errors.NoSuchKey,
},
],
[
`should return no such key if the conditions fail ${variation.it}`,
{
initVal: { value: { number: 0, string: 'zero' } },
conditions: { value: { number: { $eq: 24 } } },
expectedVal: { value: { number: 0, string: 'zero' } },
error: errors.NoSuchKey,
},
],
[
`should return no such key if the conditions fail ${variation.it}`,
{
initVal: { value: { number: 0, string: 'zero' } },
conditions: { value: { string: 'twenty-four' } },
expectedVal: { value: { number: 0, string: 'zero' } },
error: errors.NoSuchKey,
},
],
[
`should return no such key if the conditions fail ${variation.it}`,
{
initVal: { value: { number: 0, string: 'zero' } },
conditions: {
value: {
string: 'twenty-four',
number: { $eq: 0 },
},
},
expectedVal: { value: { number: 0, string: 'zero' } },
error: errors.NoSuchKey,
},
],
[
`should successfully delete matched object ${variation.it}`,
{
initVal: { value: { number: 24, string: 'twenty-four' } },
conditions: { value: { number: 24 } },
expectedVal: null,
error: null,
},
],
[
`should successfully delete matched object ${variation.it}`,
{
initVal: { value: { number: 24, string: 'twenty-four' } },
conditions: { value: { string: { $eq: 'twenty-four' } } },
expectedVal: null,
error: null,
},
],
[
`should successfully delete matched object ${variation.it}`,
{
initVal: { value: { number: 24, string: 'twenty-four' } },
conditions: {
value: {
string: { $eq: 'twenty-four' },
number: { $eq: 24 },
},
},
expectedVal: null,
error: null,
},
],
];
tests.forEach(([msg, testCase]) => it(msg, done => {
const objectKey = 'testkey';
const { initVal, conditions, expectedVal, error } = testCase;
const params = { conditions };
async.series([
next => {
if (!initVal) {
return next();
}
return metadata.putObjectMD(BUCKET_NAME, objectKey, initVal,
{}, logger, next);
},
next => metadata.deleteObjectWithCond(BUCKET_NAME, objectKey,
params, logger, err => {
if (error) {
assert.deepStrictEqual(err, error);
return next();
}
assert(!err);
return next();
}),
next => metadata.getObjectMD(BUCKET_NAME, objectKey, {}, logger,
(err, res) => {
if (expectedVal) {
assert.deepStrictEqual(res, expectedVal);
} else {
assert.deepStrictEqual(err, errors.NoSuchKey);
}
return next();
}),
], done);
}));
});
});
});

View File

@@ -1,319 +0,0 @@
'use strict'; // eslint-disable-line strict
const werelogs = require('werelogs');
const assert = require('assert');
const async = require('async');
const logger = new werelogs.Logger('MetadataProxyServer', 'debug', 'debug');
const MetadataWrapper =
require('../../../lib/storage/metadata/MetadataWrapper');
const BucketRoutes =
require('../../../lib/storage/metadata/proxy/BucketdRoutes');
const metadataWrapper = new MetadataWrapper('mem', {}, null, logger);
const { RequestDispatcher } = require('../../utils/mdProxyUtils');
const routes = new BucketRoutes(metadataWrapper, logger);
const dispatcher = new RequestDispatcher(routes);
const Bucket = 'test';
const bucketInfo = {
acl: {
Canned: 'private',
FULL_CONTROL: [],
WRITE: [],
WRITE_ACP: [],
READ: [],
READ_ACP: [],
},
name: Bucket,
owner: '9d8fe19a78974c56dceb2ea4a8f01ed0f5fecb9d29f80e9e3b84104e4a3ea520',
ownerDisplayName: 'anonymousCoward',
creationDate: '2018-06-04T17:45:42.592Z',
mdBucketModelVersion: 8,
transient: false,
deleted: false,
serverSideEncryption: null,
versioningConfiguration: null,
locationConstraint: 'us-east-1',
readLocationConstraint: 'us-east-1',
cors: null,
replicationConfiguration: null,
lifecycleConfiguration: null,
uid: 'fea97818-6a9a-11e8-9777-e311618cc5d4',
isNFS: null,
};
const objects = [
'aaa',
'bbb/xaa',
'bbb/xbb',
'bbb/xcc',
'ccc',
'ddd',
];
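// Computes what a delimiter ('/') listing should return for a prefix:
// keys sharing a sub-prefix collapse into a single common prefix entry.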
function _getExpectedListing(prefix, objects) {
const filtered = objects.map(key => {
const deprefixed = key.slice(prefix.length);
return deprefixed.replace(/[/].*/, '/');
});
const keySet = {};
return filtered.filter(key => {
if (keySet[key]) {
return false;
}
if (key === '') {
return false;
}
keySet[key] = true;
return true;
});
}
function _listingURL(prefix, marker) {
const reSlash = /[/]/g;
const escapedPrefix = prefix.replace(reSlash, '%2F');
const escapedMarker = marker.replace(reSlash, '%2F');
return `/default/bucket/${Bucket}?delimiter=%2F&prefix=` +
`${escapedPrefix}&maxKeys=1&marker=${escapedMarker}`;
}
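// Walks the listing one key at a time (maxKeys=1), asserting that each
// page returns the expected key or common prefix and the right
// truncation markers before following NextMarker to the next page.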
function _listObjects(prefix, objects, cb) {
const keys = _getExpectedListing(prefix, objects);
const markers = keys.slice(0);
markers.unshift(undefined);
const lastKey = keys[keys.length - 1];
const listing = keys.map((key, index) => ({
key,
marker: markers[index],
NextMarker: markers[index + 1],
IsTruncated: key !== lastKey,
isPrefix: key.endsWith('/'),
}));
async.mapLimit(listing, 5, (obj, next) => {
const currentMarker = obj.marker === undefined ? '' : obj.marker;
dispatcher.get(_listingURL(prefix, prefix + currentMarker),
(err, response, body) => {
if (err) {
return next(err);
}
if (obj.isPrefix) {
assert.strictEqual(body.Contents.length, 0);
assert.strictEqual(body.CommonPrefixes.length,
1);
assert.strictEqual(body.CommonPrefixes[0],
prefix + obj.key);
} else {
assert.strictEqual(body.Contents.length, 1);
assert.strictEqual(body.CommonPrefixes.length,
0);
assert.strictEqual(body.Contents[0].key,
prefix + obj.key);
}
assert.strictEqual(body.IsTruncated,
obj.IsTruncated);
if (body.IsTruncated) {
assert.strictEqual(body.NextMarker,
prefix + obj.NextMarker);
}
return next();
});
}, err => cb(err));
}
function _createObjects(objects, cb) {
async.mapLimit(objects, 5, (key, next) => {
dispatcher.post(`/default/bucket/${Bucket}/${key}`,
{ key }, next);
}, err => {
cb(err);
});
}
function _readObjects(objects, cb) {
async.mapLimit(objects, 5, (key, next) => {
dispatcher.get(`/default/bucket/${Bucket}/${key}`,
(err, response, body) => {
assert.deepStrictEqual(body.key, key);
next(err);
});
}, err => {
cb(err);
});
}
function _deleteObjects(objects, cb) {
async.mapLimit(objects, 5, (key, next) => {
dispatcher.delete(`/default/bucket/${Bucket}/${key}`,
err => next(err));
}, err => {
cb(err);
});
}
describe('Basic Metadata Proxy Server test',
() => {
jest.setTimeout(10000);
it('Should get the metadataInformation', done => {
dispatcher.get('/default/metadataInformation',
(err, response, body) => {
if (err) {
return done(err);
}
assert.deepStrictEqual(
body, { metadataVersion: 2 });
return done();
});
});
});
describe('Basic Metadata Proxy Server CRUD test', () => {
jest.setTimeout(10000);
beforeEach(done => {
dispatcher.post(`/default/bucket/${Bucket}`, bucketInfo,
done);
});
afterEach(done => {
dispatcher.delete(`/default/bucket/${Bucket}`, done);
});
it('Should get the bucket attributes', done => {
dispatcher.get(`/default/attributes/${Bucket}`,
(err, response, body) => {
if (err) {
return done(err);
}
assert.deepStrictEqual(body.name,
bucketInfo.name);
return done();
});
});
it('Should crud an object', done => {
async.waterfall([
next => dispatcher.post(`/default/bucket/${Bucket}/test1`,
{ foo: 'gabu' }, err => next(err)),
next => dispatcher.get(`/default/bucket/${Bucket}/test1`,
(err, response, body) => {
if (err) {
return next(err);
}
assert.deepStrictEqual(body.foo, 'gabu');
return next();
}),
next => dispatcher.post(`/default/bucket/${Bucket}/test1`,
{ foo: 'zome' }, err => next(err)),
next => dispatcher.get(`/default/bucket/${Bucket}/test1`,
(err, response, body) => {
if (err) {
return next(err);
}
assert.deepStrictEqual(body.foo, 'zome');
return next();
}),
next => dispatcher.delete(`/default/bucket/${Bucket}/test1`,
err => next(err)),
], err => done(err));
});
it('Should list objects', done => {
async.waterfall([
next => _createObjects(objects, next),
next => _readObjects(objects, next),
next => _listObjects('', objects, next),
next => _listObjects('bbb/', objects, next),
next => _deleteObjects(objects, next),
], err => {
done(err);
});
});
it('Should update bucket properties', done => {
dispatcher.get(
`/default/attributes/${Bucket}`, (err, response, body) => {
assert.strictEqual(err, null);
const bucketInfo = body;
const newOwnerDisplayName = 'divertedfrom';
bucketInfo.ownerDisplayName = newOwnerDisplayName;
dispatcher.post(
`/default/attributes/${Bucket}`, bucketInfo, err => {
assert.strictEqual(err, null);
dispatcher.get(
`/default/attributes/${Bucket}`,
(err, response, body) => {
assert.strictEqual(err, null);
const newBucketInfo = body;
assert.strictEqual(
newBucketInfo.ownerDisplayName,
newOwnerDisplayName);
done(null);
});
});
});
});
it('Should fail to list a non-existent bucket', done => {
dispatcher.get('/default/bucket/nonexisting',
(err, response) => {
assert.strictEqual(
response.responseHead.statusCode,
404);
done(err);
});
});
it('Should fail to get attributes from a non-existent bucket', done => {
dispatcher.get('/default/attributes/nonexisting',
(err, response) => {
assert.strictEqual(
response.responseHead.statusCode,
404);
done(err);
});
});
it('should succeed a health check', done => {
dispatcher.get('/_/healthcheck', (err, response, body) => {
if (err) {
return done(err);
}
const expectedResponse = {
memorybucket: {
code: 200,
message: 'OK',
},
};
assert.strictEqual(response.responseHead.statusCode, 200);
assert.deepStrictEqual(body, expectedResponse);
return done(err);
});
});
it('should work with parallel route', done => {
const objectName = 'theObj';
async.waterfall([
next => _createObjects([objectName], next),
next => {
dispatcher.get(
`/default/parallel/${Bucket}/${objectName}`,
(err, response, body) => {
if (err) {
return next(err);
}
assert.strictEqual(response.responseHead.statusCode,
200);
const bucketMD = JSON.parse(body.bucket);
const objectMD = JSON.parse(body.obj);
const expectedObjectMD = { key: objectName };
assert.deepStrictEqual(bucketMD.name,
bucketInfo.name);
assert.deepStrictEqual(objectMD, expectedObjectMD);
return next(err);
});
},
next => _deleteObjects([objectName], next),
], done);
});
});

View File

@@ -1,318 +0,0 @@
'use strict'; // eslint-disable-line strict
const assert = require('assert');
const async = require('async');
const RedisClient = require('../../../lib/metrics/RedisClient');
const StatsModel = require('../../../lib/metrics/StatsModel');
// setup redis client
const config = {
host: '127.0.0.1',
port: 6379,
enableOfflineQueue: true,
};
const fakeLogger = {
trace: () => {},
error: () => {},
};
const redisClient = new RedisClient(config, fakeLogger);
// setup stats model
const STATS_INTERVAL = 300; // 5 minutes
const STATS_EXPIRY = 86400; // 24 hours
const statsModel = new StatsModel(redisClient, STATS_INTERVAL, STATS_EXPIRY);
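// getStats reports one counter per 5-minute interval over the 24-hour
// expiry window (86400 / 300 = 288 buckets), so expected values are
// padded with zeros to that length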
function setExpectedStats(expected) {
return expected.concat(
Array((STATS_EXPIRY / STATS_INTERVAL) - expected.length).fill(0));
}
// Since many methods were overridden, these tests validate the changes
// made to the original methods
describe('StatsModel class', () => {
const id = 'arsenal-test';
const id2 = 'test-2';
const id3 = 'test-3';
afterEach(() => redisClient.clear(() => {}));
it('should convert a 2d array columns into rows and vice versa using _zip',
() => {
const arrays = [
[1, 2, 3],
[4, 5, 6],
[7, 8, 9],
];
const res = statsModel._zip(arrays);
const expected = [
[1, 4, 7],
[2, 5, 8],
[3, 6, 9],
];
assert.deepStrictEqual(res, expected);
});
it('_zip should return an empty array if given an invalid array', () => {
const arrays = [];
const res = statsModel._zip(arrays);
assert.deepStrictEqual(res, []);
});
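// A minimal sketch of the transpose behavior exercised above
// (assumption: _zip maps column i of every row to row i of the result
// and returns [] for empty input; zipSketch is a hypothetical name):
function zipSketch(arrays) {
if (!Array.isArray(arrays) || arrays.length === 0) {
return [];
}
return arrays[0].map((_, i) => arrays.map(row => row[i]));
}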
it('_getCount should return an array of all valid integer values',
() => {
const res = statsModel._getCount([
[null, '1'],
[null, '2'],
[null, null],
]);
assert.deepStrictEqual(res, setExpectedStats([1, 2, 0]));
});
it('should correctly record a new request with the default increment of one',
done => {
async.series([
next => {
statsModel.reportNewRequest(id, (err, res) => {
assert.ifError(err);
const expected = [[null, 1], [null, 1]];
assert.deepStrictEqual(res, expected);
next();
});
},
next => {
statsModel.reportNewRequest(id, (err, res) => {
assert.ifError(err);
const expected = [[null, 2], [null, 1]];
assert.deepStrictEqual(res, expected);
next();
});
},
], done);
});
it('should record new requests with specified increment amounts', done => {
function noop() {}
async.series([
next => {
statsModel.reportNewRequest(id, 9);
statsModel.getStats(fakeLogger, id, (err, res) => {
assert.ifError(err);
assert.deepStrictEqual(res.requests, setExpectedStats([9]));
next();
});
},
next => {
statsModel.reportNewRequest(id);
statsModel.getStats(fakeLogger, id, (err, res) => {
assert.ifError(err);
assert.deepStrictEqual(res.requests,
setExpectedStats([10]));
next();
});
},
next => {
statsModel.reportNewRequest(id, noop);
statsModel.getStats(fakeLogger, id, (err, res) => {
assert.ifError(err);
assert.deepStrictEqual(res.requests,
setExpectedStats([11]));
next();
});
},
], done);
});
it('should correctly record a 500 on the server', done => {
statsModel.report500(id, (err, res) => {
assert.ifError(err);
const expected = [[null, 1], [null, 1]];
assert.deepStrictEqual(res, expected);
done();
});
});
it('should respond with the total requests as an array', done => {
async.series([
next => {
statsModel.reportNewRequest(id, err => {
assert.ifError(err);
next();
});
},
next => {
statsModel.report500(id, err => {
assert.ifError(err);
next();
});
},
next => {
statsModel.getStats(fakeLogger, id, (err, res) => {
assert.ifError(err);
const expected = {
'requests': setExpectedStats([1]),
'500s': setExpectedStats([1]),
'sampleDuration': STATS_EXPIRY,
};
assert.deepStrictEqual(res, expected);
next();
});
},
], done);
});
it('should not crash on empty results', done => {
async.series([
next => {
statsModel.getStats(fakeLogger, id, (err, res) => {
assert.ifError(err);
const expected = {
'requests': setExpectedStats([]),
'500s': setExpectedStats([]),
'sampleDuration': STATS_EXPIRY,
};
assert.deepStrictEqual(res, expected);
next();
});
},
next => {
statsModel.getAllStats(fakeLogger, id, (err, res) => {
assert.ifError(err);
const expected = {
'requests': setExpectedStats([]),
'500s': setExpectedStats([]),
'sampleDuration': STATS_EXPIRY,
};
assert.deepStrictEqual(res, expected);
next();
});
},
], done);
});
it('should return a zero-filled array if no ids are passed to getAllStats',
done => {
statsModel.getAllStats(fakeLogger, [], (err, res) => {
assert.ifError(err);
assert.deepStrictEqual(res.requests, setExpectedStats([]));
assert.deepStrictEqual(res['500s'], setExpectedStats([]));
done();
});
});
it('should get accurately reported data for a given id from getAllStats',
done => {
statsModel.reportNewRequest(id, 9);
statsModel.reportNewRequest(id2, 2);
statsModel.reportNewRequest(id3, 3);
statsModel.report500(id);
async.series([
next => {
statsModel.getAllStats(fakeLogger, [id], (err, res) => {
assert.ifError(err);
assert.equal(res.requests[0], 9);
assert.equal(res['500s'][0], 1);
next();
});
},
next => {
statsModel.getAllStats(fakeLogger, [id, id2, id3],
(err, res) => {
assert.ifError(err);
assert.equal(res.requests[0], 14);
assert.deepStrictEqual(res.requests,
setExpectedStats([14]));
next();
});
},
], done);
});
it('should normalize to the nearest hour using normalizeTimestampByHour',
() => {
const date = new Date('2018-09-13T23:30:59.195Z');
const newDate = new Date(statsModel.normalizeTimestampByHour(date));
assert.strictEqual(date.getHours(), newDate.getHours());
assert.strictEqual(newDate.getMinutes(), 0);
assert.strictEqual(newDate.getSeconds(), 0);
assert.strictEqual(newDate.getMilliseconds(), 0);
});
it('should get previous hour using _getDatePreviousHour', () => {
const date = new Date('2018-09-13T23:30:59.195Z');
const newDate = statsModel._getDatePreviousHour(new Date(date));
const millisecondsInOneHour = 3600000;
assert.strictEqual(date - newDate, millisecondsInOneHour);
});
it('should get an array of hourly timestamps using getSortedSetHours',
() => {
const epoch = 1536882476501;
const millisecondsInOneHour = 3600000;
const expected = [];
let dateInMilliseconds = statsModel.normalizeTimestampByHour(
new Date(epoch));
for (let i = 0; i < 24; i++) {
expected.push(dateInMilliseconds);
dateInMilliseconds -= millisecondsInOneHour;
}
const res = statsModel.getSortedSetHours(epoch);
assert.deepStrictEqual(res, expected);
});
it('should apply TTL on a new sorted set using addToSortedSet', done => {
const key = 'a-test-key';
const score = 100;
const value = 'a-value';
const now = Date.now();
const nearestHour = statsModel.normalizeTimestampByHour(new Date(now));
statsModel.addToSortedSet(key, score, value, (err, res) => {
assert.ifError(err);
// check both a "zadd" and "expire" occurred
assert.equal(res, 1);
redisClient.ttl(key, (err, res) => {
assert.ifError(err);
// assert this new set has a ttl applied
assert(res > 0);
const adjustmentSecs = now - nearestHour;
const msInADay = 24 * 60 * 60 * 1000;
const msInAnHour = 60 * 60 * 1000;
const upperLimitSecs =
Math.ceil((msInADay - adjustmentSecs) / 1000);
const lowerLimitSecs =
Math.floor((msInADay - adjustmentSecs - msInAnHour) / 1000);
// assert new ttl is between 23 and 24 hours adjusted by time
// elapsed since normalized hourly time
assert(res >= lowerLimitSecs);
assert(res <= upperLimitSecs);
done();
});
});
});
});

View File

@@ -1,326 +0,0 @@
const assert = require('assert');
const ChainBackend = require('../../../lib/auth/auth').backends.chainBackend;
const BaseBackend = require('../../../lib/auth/auth').backends.baseBackend;
const errors = require('../../../lib/errors');
const testError = new Error('backend error');
const backendWithAllMethods = {
verifySignatureV2: () => {},
verifySignatureV4: () => {},
getCanonicalIds: () => {},
getEmailAddresses: () => {},
checkPolicies: () => {},
healthcheck: () => {},
};
function getBackendWithMissingMethod(methodName) {
const backend = Object.assign({}, backendWithAllMethods);
delete backend[methodName];
return backend;
}
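// Stub backend whose auth methods all call back with a fixed
// (error, result) pair, so each link in the chain can be scripted.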
class TestBackend extends BaseBackend {
constructor(service, error, result) {
super(service);
this._error = error;
this._result = result;
}
verifySignatureV2(stringToSign, signatureFromRequest, accessKey, options, callback) {
return callback(this._error, this._result);
}
verifySignatureV4(stringToSign, signatureFromRequest, accessKey, region, scopeDate, options, callback) {
return callback(this._error, this._result);
}
getCanonicalIds(emailAddresses, options, callback) {
return callback(this._error, this._result);
}
getEmailAddresses(canonicalIDs, options, callback) {
return callback(this._error, this._result);
}
checkPolicies(requestContextParams, userArn, options, callback) {
return callback(this._error, this._result);
}
healthcheck(reqUid, callback) {
return callback(this._error, this._result);
}
}
describe('Auth Backend: Chain Backend', () => {
[
['should throw an error if client list is not an array', null],
['should throw an error if client list empty', []],
['should throw an error if a client is missing the verifySignatureV2 method', [
new TestBackend(),
getBackendWithMissingMethod('verifySignatureV2'),
]],
['should throw an error if a client is missing the verifySignatureV4 auth method', [
new TestBackend(),
getBackendWithMissingMethod('verifySignatureV4'),
]],
['should throw an error if a client is missing the getCanonicalIds method', [
new TestBackend(),
getBackendWithMissingMethod('getCanonicalIds'),
]],
['should throw an error if a client is missing the getEmailAddresses method', [
new TestBackend(),
getBackendWithMissingMethod('getEmailAddresses'),
]],
['should throw an error if a client is missing the checkPolicies method', [
new TestBackend(),
getBackendWithMissingMethod('checkPolicies'),
]],
['should throw an error if a client is missing the healthcheck method', [
new TestBackend(),
getBackendWithMissingMethod('healthcheck'),
]],
].forEach(([msg, input]) => it(msg, () => {
assert.throws(() => {
new ChainBackend('chain', input); // eslint-disable-line no-new
});
}));
[
// function name, function args
['verifySignatureV2', [null, null, null, null]],
['verifySignatureV4', [null, null, null, null, null, null]],
].forEach(([fn, fnArgs]) =>
describe(`::${fn}`, () => {
it('should return an error if none of the clients returns a result', done => {
const backend = new ChainBackend('chain', [
new TestBackend('test1', testError, null),
new TestBackend('test2', testError, null),
new TestBackend('test3', testError, null),
]);
backend[fn](...fnArgs, err => {
assert.deepStrictEqual(err, testError);
done();
});
});
[
[
'should return the result of the first successful client (multiple successful clients)',
'expectedResult',
// backend constructor args
[
['test1', null, 'expectedResult'],
['test2', null, 'test2'],
['test3', testError, null],
],
],
[
'should return the result of the successful client',
'expectedResult',
// backend constructor args
[
['test1', testError, null],
['test2', null, 'expectedResult'],
['test3', testError, null],
],
],
[
'should return the result of the successful client',
'expectedResult',
// backend constructor args
[
['test1', testError, null],
['test2', testError, null],
['test3', null, 'expectedResult'],
],
],
].forEach(([msg, expected, backendArgs]) => {
it(msg, done => {
const backend = new ChainBackend('chain',
backendArgs.map((args) => new TestBackend(...args)));
backend[fn](...fnArgs, (err, res) => {
assert.ifError(err);
assert.strictEqual(res, expected);
done();
});
});
});
}));
[
// function name, function args
['getCanonicalIds', [null, null]],
['getEmailAddresses', [null, null]],
].forEach(([fn, fnArgs]) =>
describe(`::${fn}`, () => {
it('should return an error if any of the clients fails', done => {
const backend = new ChainBackend('chain', [
new TestBackend('test1', null, { message: { body: { test1: 'aaa' } } }),
new TestBackend('test2', testError, null),
new TestBackend('test3', null, { message: { body: { test2: 'bbb' } } }),
]);
backend[fn](...fnArgs, err => {
assert.deepStrictEqual(err, testError);
done();
});
});
it('should merge results from clients into a single response object', done => {
const backend = new ChainBackend('chain', [
new TestBackend('test1', null, { message: { body: { test1: 'aaa' } } }),
new TestBackend('test2', null, { message: { body: { test2: 'bbb' } } }),
]);
backend[fn](...fnArgs, (err, res) => {
assert.ifError(err);
assert.deepStrictEqual(res, {
message: { body: {
test1: 'aaa',
test2: 'bbb',
} },
});
done();
});
});
}));
describe('::checkPolicies', () => {
it('should return an error if any of the clients fails', done => {
const backend = new ChainBackend('chain', [
new TestBackend('test1', null, {
message: { body: [{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/obj1' }] },
}),
new TestBackend('test2', testError, null),
new TestBackend('test3', null, {
message: { body: [{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/obj1' }] },
}),
]);
backend.checkPolicies(null, null, null, err => {
assert.deepStrictEqual(err, testError);
done();
});
});
it('should merge results from clients into a single response object', done => {
const backend = new ChainBackend('chain', [
new TestBackend('test1', null, {
message: { body: [{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/obj1' }] },
}),
new TestBackend('test2', null, {
message: { body: [{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/obj2' }] },
}),
new TestBackend('test3', null, {
message: { body: [{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/obj1' }] },
}),
]);
backend.checkPolicies(null, null, null, (err, res) => {
assert.ifError(err);
assert.deepStrictEqual(res, {
message: { body: [
{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/obj1' },
{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/obj2' },
] },
});
done();
});
});
});
describe('::_mergeObjects', () => {
it('should correctly merge responses', () => {
const objectResps = [
{ message: { body: {
id1: 'email1@test.com',
wrongformatcanid: 'WrongFormat',
id4: 'email4@test.com',
} } },
{ message: { body: {
id2: 'NotFound',
id3: 'email3@test.com',
id4: 'email5@test.com',
} } },
];
assert.deepStrictEqual(
ChainBackend._mergeObjects(objectResps),
{
id1: 'email1@test.com',
wrongformatcanid: 'WrongFormat',
id2: 'NotFound',
id3: 'email3@test.com',
// id4 should be overwritten
id4: 'email5@test.com',
},
);
});
});
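// A minimal sketch of the merge behavior asserted above (assumption:
// _mergeObjects folds each response body left to right, so later
// backends overwrite earlier ones on conflicting ids; the name
// mergeObjectsSketch is hypothetical):
function mergeObjectsSketch(objectResps) {
return objectResps.reduce(
(merged, resp) => Object.assign(merged, resp.message.body), {});
}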
describe('::_mergePolicies', () => {
it('should correctly merge policies', () => {
const policyResps = [
{ message: { body: [
{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/true1' },
{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/true2' },
{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/false1' },
] } },
{ message: { body: [
{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/true1' },
{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/true2' },
{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/false2' },
] } },
];
assert.deepStrictEqual(
ChainBackend._mergePolicies(policyResps),
[
{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/true1' },
{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/true2' },
{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/false1' },
{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/false2' },
],
);
});
});
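// A sketch of the policy merge asserted above (assumption: results are
// deduplicated by arn and an arn is allowed if any backend allowed it;
// mergePoliciesSketch is a hypothetical name):
function mergePoliciesSketch(policyResps) {
const byArn = {};
policyResps.forEach(resp => resp.message.body.forEach(policy => {
if (!byArn[policy.arn] || policy.isAllowed) {
byArn[policy.arn] = policy;
}
}));
return Object.keys(byArn).map(arn => byArn[arn]);
}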
describe('::healthcheck', () => {
it('should return error if a single client is unhealthy', done => {
const backend = new ChainBackend('chain', [
new TestBackend('test1', null, { code: 200 }),
new TestBackend('test2', testError, { code: 503 }),
new TestBackend('test3', null, { code: 200 }),
]);
backend.healthcheck(null, (err, res) => {
assert.deepStrictEqual(err, errors.InternalError);
assert.deepStrictEqual(res, [
{ error: null, status: { code: 200 } },
{ error: testError, status: { code: 503 } },
{ error: null, status: { code: 200 } },
]);
done();
});
});
it('should return result if all clients are healthy', done => {
const backend = new ChainBackend('chain', [
new TestBackend('test1', null, { msg: 'test1', code: 200 }),
new TestBackend('test2', null, { msg: 'test2', code: 200 }),
new TestBackend('test3', null, { msg: 'test3', code: 200 }),
]);
backend.healthcheck(null, (err, res) => {
assert.ifError(err);
assert.deepStrictEqual(res, [
{ error: null, status: { msg: 'test1', code: 200 } },
{ error: null, status: { msg: 'test2', code: 200 } },
{ error: null, status: { msg: 'test3', code: 200 } },
]);
done();
});
});
});
});

View File

@@ -89,10 +89,10 @@ describe('AuthLoader class', () => {
// Check a failure when the type of field is different than
// expected
it(`should fail when modified field ${test[0]} ${test[1]}`,
done => {
should._exec = shouldFail;
should.modifiedField(obj, test[0], test[1], done);
});
}
});

View File

@@ -1,6 +1,6 @@
const assert = require('assert');
-const Indexer = require('../../../../lib/auth/backends/in_memory/Indexer');
+const Indexer = require('../../../../lib/auth/in_memory/Indexer');
const ref = require('./sample_authdata.json');
const { should } = require('./AuthLoader.spec');

Some files were not shown because too many files have changed in this diff.