Compare commits

No commits in common. "0509e8ecd3fac537a41744b103db644c3bb5d907" and "310834c237da624800ea6b4e291045808aa9bf80" have entirely different histories.

162 changed files with 3431 additions and 11285 deletions

View File

@@ -39,14 +39,10 @@ jobs:
       run: yarn --silent lint -- --max-warnings 0
     - name: lint markdown
       run: yarn --silent lint_md
-    - name: add hostname
-      run: |
-        sudo sh -c "echo '127.0.0.1 testrequestbucket.localhost' >> /etc/hosts"
-    - name: test and coverage
-      run: yarn --silent coverage
+    - name: run unit tests
+      run: yarn test
    - name: run functional tests
      run: yarn ft_test
-    - uses: codecov/codecov-action@v2
    - name: run executables tests
      run: yarn install && yarn test
      working-directory: 'lib/executables/pensieveCreds/'

View File

@@ -1,7 +1,5 @@
 # Arsenal
 
-[![codecov](https://codecov.io/gh/scality/Arsenal/branch/development/8.1/graph/badge.svg?token=X0esXhJSwb)](https://codecov.io/gh/scality/Arsenal)
-
 Common utilities for the S3 project components
 
 Within this repository, you will be able to find the shared libraries for the

View File

@@ -85,66 +85,6 @@ Used to store the bucket lifecycle configuration info
 ### Properties Added
-```javascript
-this._uid = uid || uuid();
-```
-### Usage
-Used to set a unique identifier on a bucket
-## Model version 8
-### Properties Added
-```javascript
-this._readLocationConstraint = readLocationConstraint || null;
-```
-### Usage
-Used to store default read location of the bucket
-## Model version 9
-### Properties Added
-```javascript
-this._isNFS = isNFS || null;
-```
-### Usage
-Used to determine whether the bucket may be accessed through NFS
-## Model version 10
-### Properties Added
-```javascript
-this._ingestion = ingestionConfig || null;
-```
-### Usage
-Used to store the ingestion status of a bucket
-## Model version 11
-### Properties Added
-```javascript
-this._azureInfo = azureInfo || null;
-```
-### Usage
-Used to store Azure storage account specific information
-## Model version 12
-### Properties Added
 ```javascript
 this._objectLockEnabled = objectLockEnabled || false;
 this._objectLockConfiguration = objectLockConfiguration || null;
@@ -155,7 +95,7 @@ this._objectLockConfiguration = objectLockConfiguration || null;
 Used to determine whether object lock capabilities are enabled on a bucket and
 to store the object lock configuration of the bucket
-## Model version 13
+## Model version 8
 ### Properties Added
@@ -167,7 +107,7 @@ this._notificationConfiguration = notificationConfiguration || null;
 Used to store the bucket notification configuration info
-## Model version 14
+## Model version 9
 ### Properties Added
@@ -178,3 +118,15 @@ this._serverSideEncryption.configuredMasterKeyId = configuredMasterKeyId || unde
 ### Usage
 Used to store the users configured KMS key id
+## Model version 10
+### Properties Added
+```javascript
+this._uid = uid || uuid();
+```
+### Usage
+Used to set a unique identifier on a bucket

View File

@@ -26,7 +26,7 @@
     },
     "BucketAlreadyOwnedByYou": {
         "code": 409,
-        "description": "A bucket with this name exists and is already owned by you"
+        "description": "Your previous request to create the named bucket succeeded and you already own it. You get this error in all AWS regions except US Standard, us-east-1. In us-east-1 region, you will get 200 OK, but it is no-op (if bucket exists S3 will not do anything)."
     },
     "BucketNotEmpty": {
         "code": 409,
@@ -403,10 +403,6 @@
         "code": 409,
         "description": "The request was rejected because it attempted to create a resource that already exists."
     },
-    "KeyAlreadyExists": {
-        "code": 409,
-        "description": "The request was rejected because it attempted to create a resource that already exists."
-    },
     "ServiceFailure": {
         "code": 500,
         "description": "Server error: the request processing has failed because of an unknown error, exception or failure."
@@ -764,10 +760,5 @@
     "ReadOnly": {
         "description": "trying to write to read only back-end",
         "code": 403
-    },
-    "_comment": "----------------------- authbackend -----------------------",
-    "AuthMethodNotImplemented": {
-        "description": "AuthMethodNotImplemented",
-        "code": 501
     }
 }

View File

@@ -1,28 +0,0 @@
{
"groups": {
"default": {
"packages": [
"lib/executables/pensieveCreds/package.json",
"package.json"
]
}
},
"branchPrefix": "improvement/greenkeeper.io/",
"commitMessages": {
"initialBadge": "docs(readme): add Greenkeeper badge",
"initialDependencies": "chore(package): update dependencies",
"initialBranches": "chore(bert-e): whitelist greenkeeper branches",
"dependencyUpdate": "fix(package): update ${dependency} to version ${version}",
"devDependencyUpdate": "chore(package): update ${dependency} to version ${version}",
"dependencyPin": "fix: pin ${dependency} to ${oldVersionResolved}",
"devDependencyPin": "chore: pin ${dependency} to ${oldVersionResolved}",
"closes": "\n\nCloses #${number}"
},
"ignore": [
"ajv",
"eslint",
"eslint-plugin-react",
"eslint-config-airbnb",
"eslint-config-scality"
]
}

index.js
View File

@@ -1,202 +0,0 @@
module.exports = {
auth: require('./lib/auth/auth'),
constants: require('./lib/constants'),
db: require('./lib/db'),
errors: require('./lib/errors.js'),
errorUtils: require('./lib/errorUtils'),
shuffle: require('./lib/shuffle'),
stringHash: require('./lib/stringHash'),
ipCheck: require('./lib/ipCheck'),
jsutil: require('./lib/jsutil'),
https: {
ciphers: require('./lib/https/ciphers.js'),
dhparam: require('./lib/https/dh2048.js'),
},
algorithms: {
list: require('./lib/algos/list/exportAlgos'),
listTools: {
DelimiterTools: require('./lib/algos/list/tools'),
Skip: require('./lib/algos/list/skip'),
},
cache: {
LRUCache: require('./lib/algos/cache/LRUCache'),
},
stream: {
MergeStream: require('./lib/algos/stream/MergeStream'),
},
SortedSet: require('./lib/algos/set/SortedSet'),
},
policies: {
evaluators: require('./lib/policyEvaluator/evaluator.js'),
validateUserPolicy: require('./lib/policy/policyValidator')
.validateUserPolicy,
evaluatePrincipal: require('./lib/policyEvaluator/principal'),
RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
requestUtils: require('./lib/policyEvaluator/requestUtils'),
actionMaps: require('./lib/policyEvaluator/utils/actionMaps'),
},
Clustering: require('./lib/Clustering'),
testing: {
matrix: require('./lib/testing/matrix.js'),
},
versioning: {
VersioningConstants: require('./lib/versioning/constants.js')
.VersioningConstants,
Version: require('./lib/versioning/Version.js').Version,
VersionID: require('./lib/versioning/VersionID.js'),
WriteGatheringManager: require('./lib/versioning/WriteGatheringManager.js'),
WriteCache: require('./lib/versioning/WriteCache.js'),
VersioningRequestProcessor: require('./lib/versioning/VersioningRequestProcessor.js'),
},
network: {
http: {
server: require('./lib/network/http/server'),
utils: require('./lib/network/http/utils'),
},
rpc: require('./lib/network/rpc/rpc'),
level: require('./lib/network/rpc/level-net'),
rest: {
RESTServer: require('./lib/network/rest/RESTServer'),
RESTClient: require('./lib/network/rest/RESTClient'),
},
RoundRobin: require('./lib/network/RoundRobin'),
probe: {
ProbeServer: require('./lib/network/probe/ProbeServer'),
HealthProbeServer:
require('./lib/network/probe/HealthProbeServer.js'),
Utils: require('./lib/network/probe/Utils.js'),
},
kmip: require('./lib/network/kmip'),
kmipClient: require('./lib/network/kmip/Client'),
},
s3routes: {
routes: require('./lib/s3routes/routes'),
routesUtils: require('./lib/s3routes/routesUtils'),
},
s3middleware: {
userMetadata: require('./lib/s3middleware/userMetadata'),
convertToXml: require('./lib/s3middleware/convertToXml'),
escapeForXml: require('./lib/s3middleware/escapeForXml'),
objectLegalHold: require('./lib/s3middleware/objectLegalHold'),
tagging: require('./lib/s3middleware/tagging'),
checkDateModifiedHeaders:
require('./lib/s3middleware/validateConditionalHeaders')
.checkDateModifiedHeaders,
validateConditionalHeaders:
require('./lib/s3middleware/validateConditionalHeaders')
.validateConditionalHeaders,
MD5Sum: require('./lib/s3middleware/MD5Sum'),
NullStream: require('./lib/s3middleware/nullStream'),
objectUtils: require('./lib/s3middleware/objectUtils'),
azureHelper: {
mpuUtils:
require('./lib/s3middleware/azureHelpers/mpuUtils'),
ResultsCollector:
require('./lib/s3middleware/azureHelpers/ResultsCollector'),
SubStreamInterface:
require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
},
prepareStream: require('./lib/s3middleware/prepareStream'),
processMpuParts: require('./lib/s3middleware/processMpuParts'),
retention: require('./lib/s3middleware/objectRetention'),
lifecycleHelpers: require('./lib/s3middleware/lifecycleHelpers'),
},
storage: {
metadata: {
MetadataWrapper: require('./lib/storage/metadata/MetadataWrapper'),
bucketclient: {
BucketClientInterface:
require('./lib/storage/metadata/bucketclient/' +
'BucketClientInterface'),
LogConsumer:
require('./lib/storage/metadata/bucketclient/LogConsumer'),
},
file: {
BucketFileInterface:
require('./lib/storage/metadata/file/BucketFileInterface'),
MetadataFileServer:
require('./lib/storage/metadata/file/MetadataFileServer'),
MetadataFileClient:
require('./lib/storage/metadata/file/MetadataFileClient'),
},
inMemory: {
metastore:
require('./lib/storage/metadata/in_memory/metastore'),
metadata: require('./lib/storage/metadata/in_memory/metadata'),
bucketUtilities:
require('./lib/storage/metadata/in_memory/bucket_utilities'),
},
mongoclient: {
MongoClientInterface:
require('./lib/storage/metadata/mongoclient/' +
'MongoClientInterface'),
LogConsumer:
require('./lib/storage/metadata/mongoclient/LogConsumer'),
},
proxy: {
Server: require('./lib/storage/metadata/proxy/Server'),
},
},
data: {
DataWrapper: require('./lib/storage/data/DataWrapper'),
MultipleBackendGateway:
require('./lib/storage/data/MultipleBackendGateway'),
parseLC: require('./lib/storage/data/LocationConstraintParser'),
file: {
DataFileStore:
require('./lib/storage/data/file/DataFileStore'),
DataFileInterface:
require('./lib/storage/data/file/DataFileInterface'),
},
external: {
AwsClient: require('./lib/storage/data/external/AwsClient'),
AzureClient: require('./lib/storage/data/external/AzureClient'),
GcpClient: require('./lib/storage/data/external/GcpClient'),
GCP: require('./lib/storage/data/external/GCP/GcpService'),
GcpUtils: require('./lib/storage/data/external/GCP/GcpUtils'),
GcpSigner: require('./lib/storage/data/external/GCP/GcpSigner'),
PfsClient: require('./lib/storage/data/external/PfsClient'),
backendUtils: require('./lib/storage/data/external/utils'),
},
inMemory: {
datastore: require('./lib/storage/data/in_memory/datastore'),
},
},
utils: require('./lib/storage/utils'),
},
models: {
BackendInfo: require('./lib/models/BackendInfo'),
BucketInfo: require('./lib/models/BucketInfo'),
BucketAzureInfo: require('./lib/models/BucketAzureInfo'),
ObjectMD: require('./lib/models/ObjectMD'),
ObjectMDLocation: require('./lib/models/ObjectMDLocation'),
ObjectMDAzureInfo: require('./lib/models/ObjectMDAzureInfo'),
ARN: require('./lib/models/ARN'),
WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
ReplicationConfiguration:
require('./lib/models/ReplicationConfiguration'),
LifecycleConfiguration:
require('./lib/models/LifecycleConfiguration'),
LifecycleRule: require('./lib/models/LifecycleRule'),
BucketPolicy: require('./lib/models/BucketPolicy'),
ObjectLockConfiguration:
require('./lib/models/ObjectLockConfiguration'),
NotificationConfiguration:
require('./lib/models/NotificationConfiguration'),
},
metrics: {
StatsClient: require('./lib/metrics/StatsClient'),
StatsModel: require('./lib/metrics/StatsModel'),
RedisClient: require('./lib/metrics/RedisClient'),
ZenkoMetrics: require('./lib/metrics/ZenkoMetrics'),
},
pensieve: {
credentialUtils: require('./lib/executables/pensieveCreds/utils'),
},
stream: {
readJSONStreamObject: require('./lib/stream/readJSONStreamObject'),
},
patches: {
locationConstraints: require('./lib/patches/locationConstraints'),
},
};

View File

@@ -2,7 +2,6 @@ export const auth = require('./lib/auth/auth');
 export const constants = require('./lib/constants');
 export const db = require('./lib/db');
 export const errors = require('./lib/errors.js');
-export const errorUtils = require('./lib/errorUtils');
 export const shuffle = require('./lib/shuffle');
 export const stringHash = require('./lib/stringHash');
 export const ipCheck = require('./lib/ipCheck');
@@ -15,10 +14,15 @@ export const https = {
 };
 
 export const algorithms = {
-    list: require('./lib/algos/list/exportAlgos'),
+    list: {
+        Basic: require('./lib/algos/list/basic').List,
+        Delimiter: require('./lib/algos/list/delimiter').Delimiter,
+        DelimiterVersions: require('./lib/algos/list/delimiterVersions').DelimiterVersions,
+        DelimiterMaster: require('./lib/algos/list/delimiterMaster').DelimiterMaster,
+        MPU: require('./lib/algos/list/MPU').MultipartUploads,
+    },
     listTools: {
         DelimiterTools: require('./lib/algos/list/tools'),
-        Skip: require('./lib/algos/list/skip'),
     },
     cache: {
         LRUCache: require('./lib/algos/cache/LRUCache'),
@@ -54,7 +58,6 @@ export const versioning = {
 export const network = {
     http: {
         server: require('./lib/network/http/server'),
-        utils: require('./lib/network/http/utils'),
     },
     rpc: require('./lib/network/rpc/rpc'),
     level: require('./lib/network/rpc/level-net'),
@@ -62,13 +65,10 @@ export const network = {
         RESTServer: require('./lib/network/rest/RESTServer'),
         RESTClient: require('./lib/network/rest/RESTClient'),
     },
-    RoundRobin: require('./lib/network/RoundRobin'),
     probe: {
         ProbeServer: require('./lib/network/probe/ProbeServer'),
-        HealthProbeServer:
-            require('./lib/network/probe/HealthProbeServer.js'),
-        Utils: require('./lib/network/probe/Utils.js'),
     },
+    RoundRobin: require('./lib/network/RoundRobin'),
     kmip: require('./lib/network/kmip'),
     kmipClient: require('./lib/network/kmip/Client'),
 };
@@ -84,24 +84,16 @@ export const s3middleware = {
     escapeForXml: require('./lib/s3middleware/escapeForXml'),
     objectLegalHold: require('./lib/s3middleware/objectLegalHold'),
     tagging: require('./lib/s3middleware/tagging'),
-    checkDateModifiedHeaders:
-        require('./lib/s3middleware/validateConditionalHeaders')
-            .checkDateModifiedHeaders,
     validateConditionalHeaders:
-        require('./lib/s3middleware/validateConditionalHeaders')
-            .validateConditionalHeaders,
+        require('./lib/s3middleware/validateConditionalHeaders').validateConditionalHeaders,
     MD5Sum: require('./lib/s3middleware/MD5Sum'),
     NullStream: require('./lib/s3middleware/nullStream'),
     objectUtils: require('./lib/s3middleware/objectUtils'),
     azureHelper: {
-        mpuUtils:
-            require('./lib/s3middleware/azureHelpers/mpuUtils'),
-        ResultsCollector:
-            require('./lib/s3middleware/azureHelpers/ResultsCollector'),
-        SubStreamInterface:
-            require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
+        mpuUtils: require('./lib/s3middleware/azureHelpers/mpuUtils'),
+        ResultsCollector: require('./lib/s3middleware/azureHelpers/ResultsCollector'),
+        SubStreamInterface: require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
     },
-    prepareStream: require('./lib/s3middleware/prepareStream'),
     processMpuParts: require('./lib/s3middleware/processMpuParts'),
     retention: require('./lib/s3middleware/objectRetention'),
     lifecycleHelpers: require('./lib/s3middleware/lifecycleHelpers'),
@@ -172,24 +164,17 @@ export const storage = {
 };
 
 export const models = {
-    BackendInfo: require('./lib/models/BackendInfo'),
     BucketInfo: require('./lib/models/BucketInfo'),
-    BucketAzureInfo: require('./lib/models/BucketAzureInfo'),
     ObjectMD: require('./lib/models/ObjectMD'),
     ObjectMDLocation: require('./lib/models/ObjectMDLocation'),
-    ObjectMDAzureInfo: require('./lib/models/ObjectMDAzureInfo'),
     ARN: require('./lib/models/ARN'),
     WebsiteConfiguration: require('./lib/models/WebsiteConfiguration'),
-    ReplicationConfiguration:
-        require('./lib/models/ReplicationConfiguration'),
-    LifecycleConfiguration:
-        require('./lib/models/LifecycleConfiguration'),
+    ReplicationConfiguration: require('./lib/models/ReplicationConfiguration'),
+    LifecycleConfiguration: require('./lib/models/LifecycleConfiguration'),
     LifecycleRule: require('./lib/models/LifecycleRule'),
     BucketPolicy: require('./lib/models/BucketPolicy'),
-    ObjectLockConfiguration:
-        require('./lib/models/ObjectLockConfiguration'),
-    NotificationConfiguration:
-        require('./lib/models/NotificationConfiguration'),
+    ObjectLockConfiguration: require('./lib/models/ObjectLockConfiguration'),
+    NotificationConfiguration: require('./lib/models/NotificationConfiguration'),
 };
 
 export const metrics = {
@@ -206,7 +191,3 @@ export const pensieve = {
 export const stream = {
     readJSONStreamObject: require('./lib/stream/readJSONStreamObject'),
 };
-
-export const patches = {
-    locationConstraints: require('./lib/patches/locationConstraints'),
-};

View File

@@ -91,7 +91,7 @@ class Vault {
             requestContext: serializedRCsArr,
         },
         (err, userInfo) => vaultSignatureCb(err, userInfo,
-            params.log, callback),
+            params.log, callback)
         );
     }
@@ -146,7 +146,7 @@
             requestContext: serializedRCs,
         },
         (err, userInfo) => vaultSignatureCb(err, userInfo,
-            params.log, callback, streamingV4Params),
+            params.log, callback, streamingV4Params)
         );
     }

View File

@@ -10,13 +10,11 @@ const constants = require('../constants');
 const constructStringToSignV2 = require('./v2/constructStringToSign');
 const constructStringToSignV4 = require('./v4/constructStringToSign');
 const convertUTCtoISO8601 = require('./v4/timeUtils').convertUTCtoISO8601;
-const vaultUtilities = require('./backends/in_memory/vaultUtilities');
-const inMemoryBackend = require('./backends/in_memory/Backend');
-const validateAuthConfig = require('./backends/in_memory/validateAuthConfig');
-const AuthLoader = require('./backends/in_memory/AuthLoader');
+const vaultUtilities = require('./in_memory/vaultUtilities');
+const backend = require('./in_memory/Backend');
+const validateAuthConfig = require('./in_memory/validateAuthConfig');
+const AuthLoader = require('./in_memory/AuthLoader');
 const Vault = require('./Vault');
-const baseBackend = require('./backends/base');
-const chainBackend = require('./backends/ChainBackend');
 
 let vault = null;
 const auth = {};
@@ -194,7 +192,7 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
         .filter(headerName =>
             headerName.startsWith('x-amz-')
             || headerName.startsWith('x-scal-')
-            || headerName === 'host',
+            || headerName === 'host'
         ).sort().join(';');
     const params = { request, signedHeaders, payloadChecksum,
         credentialScope, timestamp, query: data,
@@ -224,14 +222,10 @@ module.exports = {
         constructStringToSignV2,
     },
     inMemory: {
-        backend: inMemoryBackend,
+        backend,
         validateAuthConfig,
         AuthLoader,
     },
-    backends: {
-        baseBackend,
-        chainBackend,
-    },
     AuthInfo,
     Vault,
 };

View File

@@ -1,189 +0,0 @@
'use strict'; // eslint-disable-line strict
const assert = require('assert');
const async = require('async');
const errors = require('../../errors');
const BaseBackend = require('./base');
/**
* Class that provides an authentication backend that will verify signatures
* and retrieve emails and canonical ids associated with an account using a
* given list of authentication backends and vault clients.
*
* @class ChainBackend
*/
class ChainBackend extends BaseBackend {
/**
* @constructor
* @param {string} service - service id
* @param {object[]} clients - list of authentication backends or vault clients
*/
constructor(service, clients) {
super(service);
assert(Array.isArray(clients) && clients.length > 0, 'invalid client list');
assert(clients.every(client =>
typeof client.verifySignatureV4 === 'function' &&
typeof client.verifySignatureV2 === 'function' &&
typeof client.getCanonicalIds === 'function' &&
typeof client.getEmailAddresses === 'function' &&
typeof client.checkPolicies === 'function' &&
typeof client.healthcheck === 'function',
), 'invalid client: missing required auth backend methods');
this._clients = clients;
}
/*
* try task against each client for one to be successful
*/
_tryEachClient(task, cb) {
async.tryEach(this._clients.map(client => done => task(client, done)), cb);
}
/*
* apply task to all clients
*/
_forEachClient(task, cb) {
async.map(this._clients, task, cb);
}
verifySignatureV2(stringToSign, signatureFromRequest, accessKey, options, callback) {
this._tryEachClient((client, done) => client.verifySignatureV2(
stringToSign,
signatureFromRequest,
accessKey,
options,
done,
), callback);
}
verifySignatureV4(stringToSign, signatureFromRequest, accessKey, region, scopeDate, options, callback) {
this._tryEachClient((client, done) => client.verifySignatureV4(
stringToSign,
signatureFromRequest,
accessKey,
region,
scopeDate,
options,
done,
), callback);
}
static _mergeObjects(objectResponses) {
return objectResponses.reduce(
(retObj, resObj) => Object.assign(retObj, resObj.message.body),
{});
}
getCanonicalIds(emailAddresses, options, callback) {
this._forEachClient(
(client, done) => client.getCanonicalIds(emailAddresses, options, done),
(err, res) => {
if (err) {
return callback(err);
}
// TODO: atm naive merge, better handling of conflicting email results
return callback(null, {
message: {
body: ChainBackend._mergeObjects(res),
},
});
});
}
getEmailAddresses(canonicalIDs, options, callback) {
this._forEachClient(
(client, done) => client.getEmailAddresses(canonicalIDs, options, done),
(err, res) => {
if (err) {
return callback(err);
}
return callback(null, {
message: {
body: ChainBackend._mergeObjects(res),
},
});
});
}
/*
* merge policy responses into a single message
*/
static _mergePolicies(policyResponses) {
const policyMap = {};
policyResponses.forEach(resp => {
if (!resp.message || !Array.isArray(resp.message.body)) {
return;
}
resp.message.body.forEach(policy => {
const key = (policy.arn || '') + (policy.versionId || '');
if (!policyMap[key] || !policyMap[key].isAllowed) {
policyMap[key] = policy;
}
// else is duplicate policy
});
});
return Object.keys(policyMap).map(key => {
const policyRes = { isAllowed: policyMap[key].isAllowed };
if (policyMap[key].arn !== '') {
policyRes.arn = policyMap[key].arn;
}
if (policyMap[key].versionId) {
policyRes.versionId = policyMap[key].versionId;
}
return policyRes;
});
}
/*
response format:
{ message: {
body: [{}],
code: number,
message: string,
} }
*/
checkPolicies(requestContextParams, userArn, options, callback) {
this._forEachClient((client, done) => client.checkPolicies(
requestContextParams,
userArn,
options,
done,
), (err, res) => {
if (err) {
return callback(err);
}
return callback(null, {
message: {
body: ChainBackend._mergePolicies(res),
},
});
});
}
healthcheck(reqUid, callback) {
this._forEachClient((client, done) =>
client.healthcheck(reqUid, (err, res) => done(null, {
error: !!err ? err : null,
status: res,
}),
), (err, res) => {
if (err) {
return callback(err);
}
const isError = res.some(results => !!results.error);
if (isError) {
return callback(errors.InternalError, res);
}
return callback(null, res);
});
}
}
module.exports = ChainBackend;

View File

@@ -1,86 +0,0 @@
'use strict'; // eslint-disable-line strict
const errors = require('../../errors');
/**
* Base backend class
*
* @class BaseBackend
*/
class BaseBackend {
/**
* @constructor
* @param {string} service - service identifer for construction arn
*/
constructor(service) {
this.service = service;
}
/** verifySignatureV2
* @param {string} stringToSign - string to sign built per AWS rules
* @param {string} signatureFromRequest - signature sent with request
* @param {string} accessKey - account accessKey
* @param {object} options - contains algorithm (SHA1 or SHA256)
* @param {function} callback - callback with either error or user info
* @return {function} calls callback
*/
verifySignatureV2(stringToSign, signatureFromRequest,
accessKey, options, callback) {
return callback(errors.AuthMethodNotImplemented);
}
/** verifySignatureV4
* @param {string} stringToSign - string to sign built per AWS rules
* @param {string} signatureFromRequest - signature sent with request
* @param {string} accessKey - account accessKey
* @param {string} region - region specified in request credential
* @param {string} scopeDate - date specified in request credential
* @param {object} options - options to send to Vault
* (just contains reqUid for logging in Vault)
* @param {function} callback - callback with either error or user info
* @return {function} calls callback
*/
verifySignatureV4(stringToSign, signatureFromRequest, accessKey,
region, scopeDate, options, callback) {
return callback(errors.AuthMethodNotImplemented);
}
/**
* Gets canonical ID's for a list of accounts
* based on email associated with account
* @param {array} emails - list of email addresses
* @param {object} options - to send log id to vault
* @param {function} callback - callback to calling function
* @returns {function} callback with either error or
* object with email addresses as keys and canonical IDs
* as values
*/
getCanonicalIds(emails, options, callback) {
return callback(errors.AuthMethodNotImplemented);
}
/**
* Gets email addresses (referred to as diplay names for getACL's)
* for a list of accounts based on canonical IDs associated with account
* @param {array} canonicalIDs - list of canonicalIDs
* @param {object} options - to send log id to vault
* @param {function} callback - callback to calling function
* @returns {function} callback with either error or
* an object from Vault containing account canonicalID
* as each object key and an email address as the value (or "NotFound")
*/
getEmailAddresses(canonicalIDs, options, callback) {
return callback(errors.AuthMethodNotImplemented);
}
checkPolicies(requestContextParams, userArn, options, callback) {
return callback(null, { message: { body: [] } });
}
healthcheck(reqUid, callback) {
return callback(null, { code: 200, message: 'OK' });
}
}
module.exports = BaseBackend;

View File

@@ -3,7 +3,7 @@ const glob = require('simple-glob');
 const joi = require('@hapi/joi');
 const werelogs = require('werelogs');
 
-const ARN = require('../../../models/ARN');
+const ARN = require('../../models/ARN');
 
 /**
  * Load authentication information from files or pre-loaded account

View File

@@ -2,11 +2,10 @@
 const crypto = require('crypto');
 
-const errors = require('../../../errors');
+const errors = require('../../errors');
 const calculateSigningKey = require('./vaultUtilities').calculateSigningKey;
 const hashSignature = require('./vaultUtilities').hashSignature;
 const Indexer = require('./Indexer');
-const BaseBackend = require('../base');
 
 function _formatResponse(userInfoToSend) {
     return {
@@ -20,9 +19,9 @@ function _formatResponse(userInfoToSend) {
  * Class that provides a memory backend for verifying signatures and getting
  * emails and canonical ids associated with an account.
  *
- * @class InMemoryBackend
+ * @class Backend
  */
-class InMemoryBackend extends BaseBackend {
+class Backend {
     /**
      * @constructor
     * @param {string} service - service identifer for construction arn
@@ -31,11 +30,19 @@ class InMemoryBackend extends BaseBackend {
     * back and returns it in an object
     */
    constructor(service, indexer, formatter) {
-        super(service);
+        this.service = service;
        this.indexer = indexer;
        this.formatResponse = formatter;
    }
 
+    /** verifySignatureV2
+    * @param {string} stringToSign - string to sign built per AWS rules
+    * @param {string} signatureFromRequest - signature sent with request
+    * @param {string} accessKey - account accessKey
+    * @param {object} options - contains algorithm (SHA1 or SHA256)
+    * @param {function} callback - callback with either error or user info
+    * @return {function} calls callback
+    */
    verifySignatureV2(stringToSign, signatureFromRequest,
        accessKey, options, callback) {
        const entity = this.indexer.getEntityByKey(accessKey);
@@ -58,6 +65,18 @@ class InMemoryBackend extends BaseBackend {
        return callback(null, vaultReturnObject);
    }
 
+    /** verifySignatureV4
+    * @param {string} stringToSign - string to sign built per AWS rules
+    * @param {string} signatureFromRequest - signature sent with request
+    * @param {string} accessKey - account accessKey
+    * @param {string} region - region specified in request credential
+    * @param {string} scopeDate - date specified in request credential
+    * @param {object} options - options to send to Vault
+    * (just contains reqUid for logging in Vault)
+    * @param {function} callback - callback with either error or user info
+    * @return {function} calls callback
+    */
    verifySignatureV4(stringToSign, signatureFromRequest, accessKey,
        region, scopeDate, options, callback) {
        const entity = this.indexer.getEntityByKey(accessKey);
@@ -81,6 +100,16 @@ class InMemoryBackend extends BaseBackend {
        return callback(null, vaultReturnObject);
    }
 
+    /**
+    * Gets canonical ID's for a list of accounts
+    * based on email associated with account
+    * @param {array} emails - list of email addresses
+    * @param {object} log - log object
+    * @param {function} cb - callback to calling function
+    * @returns {function} callback with either error or
+    * object with email addresses as keys and canonical IDs
+    * as values
+    */
    getCanonicalIds(emails, log, cb) {
        const results = {};
        emails.forEach(email => {
@@ -101,6 +130,16 @@ class InMemoryBackend extends BaseBackend {
        return cb(null, vaultReturnObject);
    }
 
+    /**
+    * Gets email addresses (referred to as diplay names for getACL's)
+    * for a list of accounts based on canonical IDs associated with account
+    * @param {array} canonicalIDs - list of canonicalIDs
+    * @param {object} options - to send log id to vault
+    * @param {function} cb - callback to calling function
+    * @returns {function} callback with either error or
+    * an object from Vault containing account canonicalID
+    * as each object key and an email address as the value (or "NotFound")
+    */
    getEmailAddresses(canonicalIDs, options, cb) {
        const results = {};
        canonicalIDs.forEach(canonicalId => {
@@ -149,7 +188,7 @@
    }
 }
 
-class S3AuthBackend extends InMemoryBackend {
+class S3AuthBackend extends Backend {
    /**
     * @constructor
     * @param {object} authdata - the authentication config file's data

View File

@@ -43,7 +43,7 @@ function awsURIencode(input, encodeSlash, noEncodeStar) {
         return encoded;
     }
     for (let i = 0; i < input.length; i++) {
-        let ch = input.charAt(i);
+        const ch = input.charAt(i);
         if ((ch >= 'A' && ch <= 'Z') ||
             (ch >= 'a' && ch <= 'z') ||
             (ch >= '0' && ch <= '9') ||
@@ -57,20 +57,6 @@
         } else if (ch === '*') {
             encoded = encoded.concat(noEncodeStar ? '*' : '%2A');
         } else {
-            if (ch >= '\uD800' && ch <= '\uDBFF') {
-                // If this character is a high surrogate peek the next character
-                // and join it with this one if the next character is a low
-                // surrogate.
-                // Otherwise the encoded URI will contain the two surrogates as
-                // two distinct UTF-8 sequences which is not valid UTF-8.
-                if (i + 1 < input.length) {
-                    const ch2 = input.charAt(i + 1);
-                    if (ch2 >= '\uDC00' && ch2 <= '\uDFFF') {
-                        i++;
-                        ch += ch2;
-                    }
-                }
-            }
             encoded = encoded.concat(_toHexUTF8(ch));
         }
     }

View File

@@ -127,17 +127,6 @@ function check(request, log, data, awsService) {
         return { err: errors.RequestTimeTooSkewed };
     }
 
-    let proxyPath = null;
-    if (request.headers.proxy_path) {
-        try {
-            proxyPath = decodeURIComponent(request.headers.proxy_path);
-        } catch (err) {
-            log.debug('invalid proxy_path header', { proxyPath, err });
-            return { err: errors.InvalidArgument.customizeDescription(
-                'invalid proxy_path header') };
-        }
-    }
-
     const stringToSign = constructStringToSign({
         log,
         request,
@@ -147,7 +136,6 @@
         timestamp,
         payloadChecksum,
         awsService: service,
-        proxyPath,
     });
     log.trace('constructed stringToSign', { stringToSign });
     if (stringToSign instanceof Error) {

View File

@@ -62,17 +62,6 @@ function check(request, log, data) {
         return { err: errors.RequestTimeTooSkewed };
     }
 
-    let proxyPath = null;
-    if (request.headers.proxy_path) {
-        try {
-            proxyPath = decodeURIComponent(request.headers.proxy_path);
-        } catch (err) {
-            log.debug('invalid proxy_path header', { proxyPath });
-            return { err: errors.InvalidArgument.customizeDescription(
-                'invalid proxy_path header') };
-        }
-    }
-
     // In query v4 auth, the canonical request needs
     // to include the query params OTHER THAN
     // the signature so create a
@@ -98,7 +87,6 @@
         credentialScope:
             `${scopeDate}/${region}/${service}/${requestType}`,
         awsService: service,
-        proxyPath,
     });
     if (stringToSign instanceof Error) {
         return { err: stringToSign };

View File

@@ -1,21 +1,20 @@
 'use strict'; // eslint-disable-line strict
 
-const crypto = require('crypto');
-
 // The min value here is to manage further backward compat if we
 // need it
-// Default value
-const vaultGeneratedIamSecurityTokenSizeMin = 128;
-// Safe to assume that a typical token size is less than 8192 bytes
-const vaultGeneratedIamSecurityTokenSizeMax = 8192;
-// Base-64
-const vaultGeneratedIamSecurityTokenPattern = /^[A-Za-z0-9/+=]*$/;
+const iamSecurityTokenSizeMin = 128;
+const iamSecurityTokenSizeMax = 128;
+// Security token is an hex string (no real format from amazon)
+const iamSecurityTokenPattern =
+    new RegExp(`^[a-f0-9]{${iamSecurityTokenSizeMin},` +
+        `${iamSecurityTokenSizeMax}}$`);
 
 module.exports = {
     // info about the iam security token
     iamSecurityToken: {
-        min: vaultGeneratedIamSecurityTokenSizeMin,
-        max: vaultGeneratedIamSecurityTokenSizeMax,
-        pattern: vaultGeneratedIamSecurityTokenPattern,
+        min: iamSecurityTokenSizeMin,
+        max: iamSecurityTokenSizeMax,
+        pattern: iamSecurityTokenPattern,
     },
     // PublicId is used as the canonicalID for a request that contains
     // no authentication information. Requestor can access
@@ -24,7 +23,6 @@ module.exports = {
     zenkoServiceAccount: 'http://acs.zenko.io/accounts/service',
     metadataFileNamespace: '/MDFile',
     dataFileURL: '/DataFile',
-    passthroughFileURL: '/PassthroughFile',
     // AWS states max size for user-defined metadata
     // (x-amz-meta- headers) is 2 KB:
     // http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html
@@ -34,10 +32,7 @@
     emptyFileMd5: 'd41d8cd98f00b204e9800998ecf8427e',
     // Version 2 changes the format of the data location property
     // Version 3 adds the dataStoreName attribute
-    // Version 4 add the Creation-Time and Content-Language attributes,
-    // and add support for x-ms-meta-* headers in UserMetadata
-    // Version 5 adds the azureInfo structure
-    mdModelVersion: 5,
+    mdModelVersion: 3,
     /*
      * Splitter is used to build the object name for the overview of a
      * multipart upload and to build the object names for each part of a
@@ -77,44 +72,9 @@
     permittedCapitalizedBuckets: {
         METADATA: true,
     },
-    // Setting a lower object key limit to account for:
-    // - Mongo key limit of 1012 bytes
-    // - Version ID in Mongo Key if versioned of 33
-    // - Max bucket name length if bucket match false of 63
-    // - Extra prefix slash for bucket prefix if bucket match of 1
-    objectKeyByteLimit: 915,
-    /* delimiter for location-constraint. The location constraint will be able
-     * to include the ingestion flag
-     */
-    zenkoSeparator: ':',
     /* eslint-disable camelcase */
     externalBackends: { aws_s3: true, azure: true, gcp: true, pfs: true },
-    replicationBackends: { aws_s3: true, azure: true, gcp: true },
-    // hex digest of sha256 hash of empty string:
-    emptyStringHash: crypto.createHash('sha256')
-        .update('', 'binary').digest('hex'),
-    mpuMDStoredExternallyBackend: { aws_s3: true, gcp: true },
-    // AWS sets a minimum size limit for parts except for the last part.
-    // http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
-    minimumAllowedPartSize: 5242880,
-    gcpMaximumAllowedPartCount: 1024,
-    // GCP Object Tagging Prefix
-    gcpTaggingPrefix: 'aws-tag-',
-    productName: 'APN/1.0 Scality/1.0 Scality CloudServer for Zenko',
-    legacyLocations: ['sproxyd', 'legacy'],
-    // healthcheck default call from nginx is every 2 seconds
-    // for external backends, don't call unless at least 1 minute
-    // (60,000 milliseconds) since last call
-    externalBackendHealthCheckInterval: 60000,
-    // some of the available data backends (if called directly rather
-    // than through the multiple backend gateway) need a key provided
-    // as a string as first parameter of the get/delete methods.
-    clientsRequireStringKey: { sproxyd: true, cdmi: true },
-    hasCopyPartBackends: { aws_s3: true, gcp: true },
-    versioningNotImplBackends: { azure: true, gcp: true },
-    // user metadata applied on zenko-created objects
-    zenkoIDHeader: 'x-amz-meta-zenko-instance-id',
+    /* eslint-enable camelcase */
     // Default expiration value of the S3 pre-signed URL duration
     // 604800 seconds (seven days).
     defaultPreSignedURLExpiry: 7 * 24 * 60 * 60,
@@ -131,6 +91,10 @@
         's3:ObjectRemoved:DeleteMarkerCreated',
     ]),
     notificationArnPrefix: 'arn:scality:bucketnotif',
+    // some of the available data backends (if called directly rather
+    // than through the multiple backend gateway) need a key provided
+    // as a string as first parameter of the get/delete methods.
+    clientsRequireStringKey: { sproxyd: true, cdmi: true },
     // HTTP server keep-alive timeout is set to a higher value than
     // client's free sockets timeout to avoid the risk of triggering
     // ECONNRESET errors if the server closes the connection at the

View File

@@ -7,8 +7,8 @@
         "test": "mocha --recursive --timeout 5500 tests/unit"
     },
     "dependencies": {
-        "mocha": "5.2.0",
-        "async": "~2.6.1",
+        "mocha": "2.5.3",
+        "async": "^2.6.0",
         "node-forge": "^0.7.1"
     }
 }

View File

@@ -17,33 +17,11 @@ class RedisClient {
                 method: 'RedisClient.constructor',
                 redisHost: config.host,
                 redisPort: config.port,
-            }),
+            })
         );
         return this;
     }
 
-    /**
-     * scan a pattern and return matching keys
-     * @param {string} pattern - string pattern to match with all existing keys
-     * @param {number} [count=10] - scan count
-     * @param {callback} cb - callback (error, result)
-     * @return {undefined}
-     */
-    scan(pattern, count = 10, cb) {
-        const params = { match: pattern, count };
-        const keys = [];
-        const stream = this._client.scanStream(params);
-        stream.on('data', resultKeys => {
-            for (let i = 0; i < resultKeys.length; i++) {
-                keys.push(resultKeys[i]);
-            }
-        });
-        stream.on('end', () => {
-            cb(null, keys);
-        });
-    }
-
     /**
      * increment value of a key by 1 and set a ttl
      * @param {string} key - key holding the value
@@ -57,17 +35,6 @@
             .exec(cb);
     }
 
-    /**
-     * increment value of a key by a given amount
-     * @param {string} key - key holding the value
-     * @param {number} amount - amount to increase by
-     * @param {callback} cb - callback
-     * @return {undefined}
-     */
-    incrby(key, amount, cb) {
-        return this._client.incrby(key, amount, cb);
-    }
-
     /**
      * increment value of a key by a given amount and set a ttl
      * @param {string} key - key holding the value
@@ -83,24 +50,13 @@
     }
 
     /**
-     * decrement value of a key by a given amount
-     * @param {string} key - key holding the value
-     * @param {number} amount - amount to increase by
+     * execute a batch of commands
+     * @param {string[]} cmds - list of commands
      * @param {callback} cb - callback
      * @return {undefined}
      */
-    decrby(key, amount, cb) {
-        return this._client.decrby(key, amount, cb);
-    }
-
-    /**
-     * get value stored at key
-     * @param {string} key - key holding the value
-     * @param {callback} cb - callback
-     * @return {undefined}
-     */
-    get(key, cb) {
-        return this._client.get(key, cb);
+    batch(cmds, cb) {
+        return this._client.pipeline(cmds).exec(cb);
     }
 
     /**
@@ -115,16 +71,6 @@
         return this._client.exists(key, cb);
     }
 
-    /**
-     * execute a batch of commands
-     * @param {string[]} cmds - list of commands
-     * @param {callback} cb - callback
-     * @return {undefined}
-     */
-    batch(cmds, cb) {
-        return this._client.pipeline(cmds).exec(cb);
-    }
-
     /**
      * Add a value and its score to a sorted set. If no sorted set exists, this
     * will create a new one for the given key.
@@ -204,26 +150,12 @@
         return this._client.zrangebyscore(key, min, max, cb);
     }
 
-    /**
-     * get TTL or expiration in seconds
-     * @param {string} key - name of key
-     * @param {function} cb - callback
-     * @return {undefined}
-     */
-    ttl(key, cb) {
-        return this._client.ttl(key, cb);
-    }
-
     clear(cb) {
         return this._client.flushdb(cb);
     }
 
-    disconnect(cb) {
-        return this._client.quit(cb);
-    }
-
-    listClients(cb) {
-        return this._client.client('list', cb);
+    disconnect() {
+        this._client.disconnect();
     }
 }

View File

@@ -41,11 +41,11 @@ class StatsClient {
     /**
      * build redis key to get total number of occurrences on the server
      * @param {string} name - key name identifier
-     * @param {Date} date - Date instance
+     * @param {object} d - Date instance
      * @return {string} key - key for redis
      */
-    buildKey(name, date) {
-        return `${name}:${this._normalizeTimestamp(date)}`;
+    _buildKey(name, d) {
+        return `${name}:${this._normalizeTimestamp(d)}`;
     }
 
     /**
@@ -85,35 +85,11 @@
             amount = (typeof incr === 'number') ? incr : 1;
         }
 
-        const key = this.buildKey(`${id}:requests`, new Date());
+        const key = this._buildKey(`${id}:requests`, new Date());
         return this._redis.incrbyEx(key, amount, this._expiry, callback);
     }
 
-    /**
-     * Increment the given key by the given value.
-     * @param {String} key - The Redis key to increment
-     * @param {Number} incr - The value to increment by
-     * @param {function} [cb] - callback
-     * @return {undefined}
-     */
-    incrementKey(key, incr, cb) {
-        const callback = cb || this._noop;
-        return this._redis.incrby(key, incr, callback);
-    }
-
-    /**
-     * Decrement the given key by the given value.
-     * @param {String} key - The Redis key to decrement
-     * @param {Number} decr - The value to decrement by
-     * @param {function} [cb] - callback
-     * @return {undefined}
-     */
-    decrementKey(key, decr, cb) {
-        const callback = cb || this._noop;
-        return this._redis.decrby(key, decr, callback);
-    }
-
     /**
      * report/record a request that ended up being a 500 on the server
      * @param {string} id - service identifier
@@ -125,54 +101,10 @@
             return undefined;
         }
         const callback = cb || this._noop;
-        const key = this.buildKey(`${id}:500s`, new Date());
+        const key = this._buildKey(`${id}:500s`, new Date());
         return this._redis.incrEx(key, this._expiry, callback);
     }
 
-    /**
-     * wrapper on `getStats` that handles a list of keys
-     * @param {object} log - Werelogs request logger
-     * @param {array} ids - service identifiers
-     * @param {callback} cb - callback to call with the err/result
-     * @return {undefined}
-     */
-    getAllStats(log, ids, cb) {
-        if (!this._redis) {
-            return cb(null, {});
-        }
-
-        const statsRes = {
-            'requests': 0,
-            '500s': 0,
-            'sampleDuration': this._expiry,
-        };
-        let requests = 0;
-        let errors = 0;
-
-        // for now set concurrency to default of 10
-        return async.eachLimit(ids, 10, (id, done) => {
-            this.getStats(log, id, (err, res) => {
-                if (err) {
-                    return done(err);
-                }
-                requests += res.requests;
-                errors += res['500s'];
-                return done();
-            });
-        }, error => {
-            if (error) {
-                log.error('error getting stats', {
-                    error,
-                    method: 'StatsClient.getAllStats',
-                });
-                return cb(null, statsRes);
-            }
-            statsRes.requests = requests;
-            statsRes['500s'] = errors;
-            return cb(null, statsRes);
-        });
-    }
-
     /**
      * get stats for the last x seconds, x being the sampling duration
      * @param {object} log - Werelogs request logger
@@ -189,8 +121,8 @@
         const reqsKeys = [];
         const req500sKeys = [];
         for (let i = 0; i < totalKeys; i++) {
-            reqsKeys.push(['get', this.buildKey(`${id}:requests`, d)]);
-            req500sKeys.push(['get', this.buildKey(`${id}:500s`, d)]);
+            reqsKeys.push(['get', this._buildKey(`${id}:requests`, d)]);
+            req500sKeys.push(['get', this._buildKey(`${id}:500s`, d)]);
             this._setPrevInterval(d);
         }
         return async.parallel([
View File

@ -1,7 +1,4 @@
const async = require('async');
const StatsClient = require('./StatsClient'); const StatsClient = require('./StatsClient');
/** /**
* @class StatsModel * @class StatsModel
* *
@ -9,140 +6,6 @@ const StatsClient = require('./StatsClient');
* rather than by seconds * rather than by seconds
*/ */
class StatsModel extends StatsClient { class StatsModel extends StatsClient {
/**
* Utility method to convert 2d array rows to columns, and vice versa
* See also: https://docs.ruby-lang.org/en/2.0.0/Array.html#method-i-zip
* @param {array} arrays - 2d array of integers
* @return {array} converted array
*/
_zip(arrays) {
if (arrays.length > 0 && arrays.every(a => Array.isArray(a))) {
return arrays[0].map((_, i) => arrays.map(a => a[i]));
}
return [];
}
/**
* normalize to the nearest interval
* @param {object} d - Date instance
* @return {number} timestamp - normalized to the nearest interval
*/
_normalizeTimestamp(d) {
const m = d.getMinutes();
return d.setMinutes(m - m % (Math.floor(this._interval / 60)), 0, 0);
}
/**
* override the method to get the count as an array of integers separated
* by each interval
* typical input looks like [[null, '1'], [null, '2'], [null, null]...]
* @param {array} arr - each index contains the result of each batch command
* where index 0 signifies the error and index 1 contains the result
* @return {array} array of integers, ordered from most recent interval to
* oldest interval with length of (expiry / interval)
*/
_getCount(arr) {
const size = Math.floor(this._expiry / this._interval);
const array = arr.reduce((store, i) => {
let num = parseInt(i[1], 10);
num = Number.isNaN(num) ? 0 : num;
store.push(num);
return store;
}, []);
if (array.length < size) {
array.push(...Array(size - array.length).fill(0));
}
return array;
}
/**
* wrapper on `getStats` that handles a list of keys
* override the method to reduce the returned 2d array from `_getCount`
* @param {object} log - Werelogs request logger
* @param {array} ids - service identifiers
* @param {callback} cb - callback to call with the err/result
* @return {undefined}
*/
getAllStats(log, ids, cb) {
if (!this._redis) {
return cb(null, {});
}
const size = Math.floor(this._expiry / this._interval);
const statsRes = {
'requests': Array(size).fill(0),
'500s': Array(size).fill(0),
'sampleDuration': this._expiry,
};
const requests = [];
const errors = [];
if (ids.length === 0) {
return cb(null, statsRes);
}
// for now set concurrency to default of 10
return async.eachLimit(ids, 10, (id, done) => {
this.getStats(log, id, (err, res) => {
if (err) {
return done(err);
}
requests.push(res.requests);
errors.push(res['500s']);
return done();
});
}, error => {
if (error) {
log.error('error getting stats', {
error,
method: 'StatsModel.getAllStats',
});
return cb(null, statsRes);
}
statsRes.requests = this._zip(requests).map(arr =>
arr.reduce((acc, i) => acc + i), 0);
statsRes['500s'] = this._zip(errors).map(arr =>
arr.reduce((acc, i) => acc + i), 0);
return cb(null, statsRes);
});
}
/**
* Handles getting a list of global keys.
* @param {array} ids - Service identifiers
* @param {object} log - Werelogs request logger
* @param {function} cb - Callback
* @return {undefined}
*/
getAllGlobalStats(ids, log, cb) {
const reqsKeys = ids.map(key => (['get', key]));
return this._redis.batch(reqsKeys, (err, res) => {
const statsRes = { requests: 0 };
if (err) {
log.error('error getting metrics', {
error: err,
method: 'StatsClient.getAllGlobalStats',
});
return cb(null, statsRes);
}
statsRes.requests = res.reduce((sum, curr) => {
const [cmdErr, val] = curr;
if (cmdErr) {
// Log any individual request errors from the batch request.
log.error('error getting metrics', {
error: cmdErr,
method: 'StatsClient.getAllGlobalStats',
});
}
return sum + (Number.parseInt(val, 10) || 0);
}, 0);
return cb(null, statsRes);
});
}
/** /**
* normalize date timestamp to the nearest hour * normalize date timestamp to the nearest hour
* @param {Date} d - Date instance * @param {Date} d - Date instance
@ -161,6 +24,34 @@ class StatsModel extends StatsClient {
return d.setHours(d.getHours() - 1); return d.setHours(d.getHours() - 1);
} }
/**
* normalize to the nearest interval
* @param {object} d - Date instance
* @return {number} timestamp - normalized to the nearest interval
*/
_normalizeTimestamp(d) {
const m = d.getMinutes();
return d.setMinutes(m - m % (Math.floor(this._interval / 60)), 0, 0);
}
/**
* override the method to get the result as an array of integers separated
* by each interval
* typical input looks like [[null, '1'], [null, '2'], [null, null]...]
* @param {array} arr - each index contains the result of each batch command
* where index 0 signifies the error and index 1 contains the result
* @return {array} array of integers, ordered from most recent interval to
* oldest interval
*/
_getCount(arr) {
return arr.reduce((store, i) => {
let num = parseInt(i[1], 10);
num = Number.isNaN(num) ? 0 : num;
store.push(num);
return store;
}, []);
}
    /**
     * get list of sorted set key timestamps
     * @param {number} epoch - epoch time

@@ -1,237 +0,0 @@
/**
* Helper class to ease access to the Azure specific information for
* storage accounts mapped to buckets.
*/
class BucketAzureInfo {
/**
* @constructor
* @param {object} obj - Raw structure for the Azure info on storage account
* @param {string} obj.sku - SKU name of this storage account
* @param {string} obj.accessTier - Access Tier name of this storage account
* @param {string} obj.kind - Kind name of this storage account
* @param {string[]} obj.systemKeys - pair of shared keys for the system
* @param {string[]} obj.tenantKeys - pair of shared keys for the tenant
* @param {string} obj.subscriptionId - subscription ID the storage account
* belongs to
* @param {string} obj.resourceGroup - Resource group name the storage
* account belongs to
* @param {object} obj.deleteRetentionPolicy - Delete retention policy
* @param {boolean} obj.deleteRetentionPolicy.enabled -
* @param {number} obj.deleteRetentionPolicy.days -
* @param {object[]} obj.managementPolicies - Management policies for this
* storage account
     * @param {boolean} obj.httpsOnly - Serve the content of this storage
* account through HTTPS only
* @param {object} obj.tags - Set of tags applied on this storage account
* @param {object[]} obj.networkACL - Network ACL of this storage account
* @param {string} obj.cname - CNAME of this storage account
* @param {boolean} obj.azureFilesAADIntegration - whether or not Azure
* Files AAD Integration is enabled for this storage account
* @param {boolean} obj.hnsEnabled - whether or not a hierarchical namespace
* is enabled for this storage account
* @param {object} obj.logging - service properties: logging
* @param {object} obj.hourMetrics - service properties: hourMetrics
* @param {object} obj.minuteMetrics - service properties: minuteMetrics
* @param {string} obj.serviceVersion - service properties: serviceVersion
*/
constructor(obj) {
this._data = {
sku: obj.sku,
accessTier: obj.accessTier,
kind: obj.kind,
systemKeys: obj.systemKeys,
tenantKeys: obj.tenantKeys,
subscriptionId: obj.subscriptionId,
resourceGroup: obj.resourceGroup,
deleteRetentionPolicy: obj.deleteRetentionPolicy,
managementPolicies: obj.managementPolicies,
httpsOnly: obj.httpsOnly,
tags: obj.tags,
networkACL: obj.networkACL,
cname: obj.cname,
azureFilesAADIntegration: obj.azureFilesAADIntegration,
hnsEnabled: obj.hnsEnabled,
logging: obj.logging,
hourMetrics: obj.hourMetrics,
minuteMetrics: obj.minuteMetrics,
serviceVersion: obj.serviceVersion,
};
}
getSku() {
return this._data.sku;
}
setSku(sku) {
this._data.sku = sku;
return this;
}
getAccessTier() {
return this._data.accessTier;
}
setAccessTier(accessTier) {
this._data.accessTier = accessTier;
return this;
}
getKind() {
return this._data.kind;
}
setKind(kind) {
this._data.kind = kind;
return this;
}
getSystemKeys() {
return this._data.systemKeys;
}
setSystemKeys(systemKeys) {
this._data.systemKeys = systemKeys;
return this;
}
getTenantKeys() {
return this._data.tenantKeys;
}
setTenantKeys(tenantKeys) {
this._data.tenantKeys = tenantKeys;
return this;
}
getSubscriptionId() {
return this._data.subscriptionId;
}
setSubscriptionId(subscriptionId) {
this._data.subscriptionId = subscriptionId;
return this;
}
getResourceGroup() {
return this._data.resourceGroup;
}
setResourceGroup(resourceGroup) {
this._data.resourceGroup = resourceGroup;
return this;
}
getDeleteRetentionPolicy() {
return this._data.deleteRetentionPolicy;
}
setDeleteRetentionPolicy(deleteRetentionPolicy) {
this._data.deleteRetentionPolicy = deleteRetentionPolicy;
return this;
}
getManagementPolicies() {
return this._data.managementPolicies;
}
setManagementPolicies(managementPolicies) {
this._data.managementPolicies = managementPolicies;
return this;
}
getHttpsOnly() {
return this._data.httpsOnly;
}
setHttpsOnly(httpsOnly) {
this._data.httpsOnly = httpsOnly;
return this;
}
getTags() {
return this._data.tags;
}
setTags(tags) {
this._data.tags = tags;
return this;
}
getNetworkACL() {
return this._data.networkACL;
}
setNetworkACL(networkACL) {
this._data.networkACL = networkACL;
return this;
}
getCname() {
return this._data.cname;
}
setCname(cname) {
this._data.cname = cname;
return this;
}
getAzureFilesAADIntegration() {
return this._data.azureFilesAADIntegration;
}
setAzureFilesAADIntegration(azureFilesAADIntegration) {
this._data.azureFilesAADIntegration = azureFilesAADIntegration;
return this;
}
getHnsEnabled() {
return this._data.hnsEnabled;
}
setHnsEnabled(hnsEnabled) {
this._data.hnsEnabled = hnsEnabled;
return this;
}
getLogging() {
return this._data.logging;
}
setLogging(logging) {
this._data.logging = logging;
return this;
}
getHourMetrics() {
return this._data.hourMetrics;
}
setHourMetrics(hourMetrics) {
this._data.hourMetrics = hourMetrics;
return this;
}
getMinuteMetrics() {
return this._data.minuteMetrics;
}
setMinuteMetrics(minuteMetrics) {
this._data.minuteMetrics = minuteMetrics;
return this;
}
getServiceVersion() {
return this._data.serviceVersion;
}
setServiceVersion(serviceVersion) {
this._data.serviceVersion = serviceVersion;
return this;
}
getValue() {
return this._data;
}
}
module.exports = BucketAzureInfo;
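A minimal usage sketch of the removed helper (all field values below are invented for illustration):

```javascript
// Illustrative: wrap a raw structure, then adjust it with the fluent setters.
const azureInfo = new BucketAzureInfo({
    sku: 'Standard_LRS',
    accessTier: 'Hot',
    kind: 'StorageV2',
    httpsOnly: false,
});
azureInfo
    .setHttpsOnly(true)
    .setCname('example.blob.core.windows.net');
const raw = azureInfo.getValue(); // plain object, ready to persist
```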

@@ -9,9 +9,8 @@ const BucketPolicy = require('./BucketPolicy');
const NotificationConfiguration = require('./NotificationConfiguration');
// WHEN UPDATING THIS NUMBER, UPDATE BucketInfoModelVersion.md CHANGELOG
-// BucketInfoModelVersion.md can be found in documentation/ at the root
-// of this repository
-const modelVersion = 14;
+// BucketInfoModelVersion.md can be found in the root of this repository
+const modelVersion = 10;
class BucketInfo {
    /**
@@ -42,8 +41,7 @@ class BucketInfo {
     * @param {object} versioningConfiguration - versioning configuration
     * @param {string} versioningConfiguration.Status - versioning status
     * @param {object} versioningConfiguration.MfaDelete - versioning mfa delete
-     * @param {string} locationConstraint - locationConstraint for bucket that
-     * also includes the ingestion flag
+     * @param {string} locationConstraint - locationConstraint for bucket
     * @param {WebsiteConfiguration} [websiteConfiguration] - website
     * configuration
     * @param {object[]} [cors] - collection of CORS rules to apply
@@ -59,11 +57,6 @@ class BucketInfo {
     * @param {object} [lifecycleConfiguration] - lifecycle configuration
     * @param {object} [bucketPolicy] - bucket policy
     * @param {string} [uid] - unique identifier for the bucket, necessary
     * addition for use with lifecycle operations
     * @param {string} readLocationConstraint - readLocationConstraint for bucket
* @param {boolean} [isNFS] - whether the bucket is on NFS
* @param {object} [ingestionConfig] - object for ingestion status: en/dis
* @param {object} [azureInfo] - Azure storage account specific info
     * @param {boolean} [objectLockEnabled] - true when object lock enabled
     * @param {object} [objectLockConfiguration] - object lock configuration
     * @param {object} [notificationConfiguration] - bucket notification configuration
@@ -73,9 +66,8 @@
        serverSideEncryption, versioningConfiguration,
        locationConstraint, websiteConfiguration, cors,
        replicationConfiguration, lifecycleConfiguration,
-        bucketPolicy, uid, readLocationConstraint, isNFS,
-        ingestionConfig, azureInfo, objectLockEnabled,
-        objectLockConfiguration, notificationConfiguration) {
+        bucketPolicy, uid, objectLockEnabled, objectLockConfiguration,
+        notificationConfiguration) {
        assert.strictEqual(typeof name, 'string');
        assert.strictEqual(typeof owner, 'string');
        assert.strictEqual(typeof ownerDisplayName, 'string');
@@ -116,15 +108,6 @@
        if (locationConstraint) {
            assert.strictEqual(typeof locationConstraint, 'string');
        }
if (ingestionConfig) {
assert.strictEqual(typeof ingestionConfig, 'object');
}
if (azureInfo) {
assert.strictEqual(typeof azureInfo, 'object');
}
if (readLocationConstraint) {
assert.strictEqual(typeof readLocationConstraint, 'string');
}
        if (websiteConfiguration) {
            assert(websiteConfiguration instanceof WebsiteConfiguration);
            const { indexDocument, errorDocument, redirectAllRequestsTo,
@@ -181,16 +164,12 @@
        this._serverSideEncryption = serverSideEncryption || null;
        this._versioningConfiguration = versioningConfiguration || null;
        this._locationConstraint = locationConstraint || null;
this._readLocationConstraint = readLocationConstraint || null;
        this._websiteConfiguration = websiteConfiguration || null;
        this._replicationConfiguration = replicationConfiguration || null;
        this._cors = cors || null;
        this._lifecycleConfiguration = lifecycleConfiguration || null;
        this._bucketPolicy = bucketPolicy || null;
        this._uid = uid || uuid();
this._isNFS = isNFS || null;
this._ingestion = ingestionConfig || null;
this._azureInfo = azureInfo || null;
        this._objectLockEnabled = objectLockEnabled || false;
        this._objectLockConfiguration = objectLockConfiguration || null;
        this._notificationConfiguration = notificationConfiguration || null;
@@ -213,16 +192,12 @@
            serverSideEncryption: this._serverSideEncryption,
            versioningConfiguration: this._versioningConfiguration,
            locationConstraint: this._locationConstraint,
readLocationConstraint: this._readLocationConstraint,
            websiteConfiguration: undefined,
            cors: this._cors,
            replicationConfiguration: this._replicationConfiguration,
            lifecycleConfiguration: this._lifecycleConfiguration,
            bucketPolicy: this._bucketPolicy,
            uid: this._uid,
isNFS: this._isNFS,
ingestion: this._ingestion,
azureInfo: this._azureInfo,
            objectLockEnabled: this._objectLockEnabled,
            objectLockConfiguration: this._objectLockConfiguration,
            notificationConfiguration: this._notificationConfiguration,
@@ -247,8 +222,7 @@
            obj.transient, obj.deleted, obj.serverSideEncryption,
            obj.versioningConfiguration, obj.locationConstraint, websiteConfig,
            obj.cors, obj.replicationConfiguration, obj.lifecycleConfiguration,
-            obj.bucketPolicy, obj.uid, obj.readLocationConstraint, obj.isNFS,
-            obj.ingestion, obj.azureInfo, obj.objectLockEnabled,
-            obj.objectLockConfiguration, obj.notificationConfiguration);
+            obj.bucketPolicy, obj.uid, obj.objectLockEnabled,
+            obj.objectLockConfiguration, obj.notificationConfiguration);
    }
@@ -273,10 +247,8 @@
            data._versioningConfiguration, data._locationConstraint,
            data._websiteConfiguration, data._cors,
            data._replicationConfiguration, data._lifecycleConfiguration,
-            data._bucketPolicy, data._uid, data._readLocationConstraint,
-            data._isNFS, data._ingestion, data._azureInfo,
-            data._objectLockEnabled, data._objectLockConfiguration,
-            data._notificationConfiguration);
+            data._bucketPolicy, data._uid, data._objectLockEnabled,
+            data._objectLockConfiguration, data._notificationConfiguration);
    }
    /**
@@ -573,17 +545,6 @@
        return this._locationConstraint;
    }
/**
* Get read location constraint.
* @return {string} - bucket read location constraint
*/
getReadLocationConstraint() {
if (this._readLocationConstraint) {
return this._readLocationConstraint;
}
return this._locationConstraint;
}
    /**
     * Set Bucket model version
     *
@@ -672,85 +633,6 @@
        this._uid = uid;
        return this;
    }
/**
* Check if the bucket is an NFS bucket.
     * @return {boolean} - Whether the bucket is NFS or not
*/
isNFS() {
return this._isNFS;
}
/**
* Set whether the bucket is an NFS bucket.
     * @param {boolean} isNFS - Whether the bucket is NFS or not
* @return {BucketInfo} - bucket info instance
*/
setIsNFS(isNFS) {
this._isNFS = isNFS;
return this;
}
/**
* enable ingestion, set 'this._ingestion' to { status: 'enabled' }
* @return {BucketInfo} - bucket info instance
*/
enableIngestion() {
this._ingestion = { status: 'enabled' };
return this;
}
/**
* disable ingestion, set 'this._ingestion' to { status: 'disabled' }
* @return {BucketInfo} - bucket info instance
*/
disableIngestion() {
this._ingestion = { status: 'disabled' };
return this;
}
/**
* Get ingestion configuration
* @return {object} - bucket ingestion configuration: Enabled or Disabled
*/
getIngestion() {
return this._ingestion;
}
/**
     * Check if bucket is an ingestion bucket
     * @return {boolean} - 'true' if bucket is ingestion bucket, 'false'
     * otherwise
*/
isIngestionBucket() {
const ingestionConfig = this.getIngestion();
if (ingestionConfig) {
return true;
}
return false;
}
/**
* Check if ingestion is enabled
* @return {boolean} - 'true' if ingestion is enabled, otherwise 'false'
*/
isIngestionEnabled() {
const ingestionConfig = this.getIngestion();
return ingestionConfig ? ingestionConfig.status === 'enabled' : false;
}
/**
* Return the Azure specific storage account information for this bucket
     * @return {object} - a structure suitable for the {@link BucketAzureInfo}
* constructor
*/
getAzureInfo() {
return this._azureInfo;
}
/**
* Set the Azure specific storage account information for this bucket
* @param {object} azureInfo - a structure suitable for
* {@link BucketAzureInfo} construction
* @return {BucketInfo} - bucket info instance
*/
setAzureInfo(azureInfo) {
this._azureInfo = azureInfo;
return this;
}
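Before their removal, these helpers composed as in this sketch (`bucket` stands for an existing `BucketInfo` instance):

```javascript
// Illustrative: toggling ingestion flips the stored status object that
// isIngestionBucket() and isIngestionEnabled() inspect.
bucket.enableIngestion();
bucket.isIngestionBucket();  // true: an ingestion config object is present
bucket.isIngestionEnabled(); // true: its status is 'enabled'
bucket.disableIngestion();
bucket.isIngestionBucket();  // still true: config is { status: 'disabled' }
bucket.isIngestionEnabled(); // false
```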
    /**
     * Check if object lock is enabled.
     * @return {boolean} - depending on whether object lock is enabled

@@ -5,8 +5,6 @@
const errors = require('../errors');
const LifecycleRule = require('./LifecycleRule');
const escapeForXml = require('../s3middleware/escapeForXml');
const MAX_DAYS = 2147483647; // Max 32-bit signed binary integer.
/**
 * Format of xml request:
@@ -87,13 +85,10 @@ class LifecycleConfiguration {
    /**
     * Create a Lifecycle Configuration instance
     * @param {string} xml - the parsed xml
-     * @param {object} config - the CloudServer config
     * @return {object} - LifecycleConfiguration instance
     */
-    constructor(xml, config) {
+    constructor(xml) {
        this._parsedXML = xml;
-        this._storageClasses =
-            config.replicationEndpoints.map(endpoint => endpoint.site);
        this._ruleIDs = [];
        this._tagKeys = [];
        this._config = {};
@@ -216,10 +211,9 @@
     */
    _parseRule(rule) {
        const ruleObj = {};
-        if (rule.NoncurrentVersionTransition) {
+        if (rule.Transition || rule.NoncurrentVersionTransition) {
            ruleObj.error = errors.NotImplemented.customizeDescription(
-                'NoncurrentVersionTransition lifecycle action not yet ' +
-                'implemented');
+                'Transition lifecycle action not yet implemented');
            return ruleObj;
        }
        // Either Prefix or Filter must be included, but can be empty string
@@ -474,315 +468,6 @@
        return statusObj;
    }
/**
* Finds the prefix and/or tags of the given rule and gets the error message
* @param {object} rule - The rule to find the prefix in
* @return {string} - The prefix of filter information
*/
_getRuleFilterDesc(rule) {
if (rule.Prefix) {
return `prefix '${rule.Prefix[0]}'`;
}
// There must be a filter if no top-level prefix is provided. First
// check if there are multiple filters (i.e. `Filter.And`).
if (rule.Filter[0] === undefined || rule.Filter[0].And === undefined) {
const { Prefix, Tag } = rule.Filter[0] || {};
if (Prefix) {
return `filter '(prefix=${Prefix[0]})'`;
}
if (Tag) {
const { Key, Value } = Tag[0];
return `filter '(tag: key=${Key[0]}, value=${Value[0]})'`;
}
return 'filter (all)';
}
const filters = [];
const { Prefix, Tag } = rule.Filter[0].And[0];
if (Prefix) {
filters.push(`prefix=${Prefix[0]}`);
}
Tag.forEach(tag => {
const { Key, Value } = tag;
filters.push(`tag: key=${Key[0]}, value=${Value[0]}`);
});
const joinedFilters = filters.join(' and ');
return `filter '(${joinedFilters})'`;
}
/**
* Checks the validity of the given field
* @param {object} params - Given function parameters
* @param {string} params.days - The value of the field to check
* @param {string} params.field - The field name with the value
* @param {string} params.ancestor - The immediate ancestor field
* @return {object|null} Returns an error object or `null`
*/
_checkDays(params) {
const { days, field, ancestor } = params;
if (days < 0) {
const msg = `'${field}' in ${ancestor} action must be nonnegative`;
return errors.InvalidArgument.customizeDescription(msg);
}
if (days > MAX_DAYS) {
return errors.MalformedXML.customizeDescription(
`'${field}' in ${ancestor} action must not exceed ${MAX_DAYS}`);
}
return null;
}
/**
* Checks the validity of the given storage class
* @param {object} params - Given function parameters
* @param {array} params.usedStorageClasses - Storage classes used in other
* rules
* @param {string} params.storageClass - The storage class of the current
* rule
* @param {string} params.ancestor - The immediate ancestor field
* @param {string} params.prefix - The prefix of the rule
* @return {object|null} Returns an error object or `null`
*/
_checkStorageClasses(params) {
const { usedStorageClasses, storageClass, ancestor, rule } = params;
if (!this._storageClasses.includes(storageClass)) {
// This differs from the AWS message. This will help the user since
// the StorageClass does not conform to AWS specs.
const list = `'${this._storageClasses.join("', '")}'`;
const msg = `'StorageClass' must be one of ${list}`;
return errors.MalformedXML.customizeDescription(msg);
}
if (usedStorageClasses.includes(storageClass)) {
const msg = `'StorageClass' must be different for '${ancestor}' ` +
`actions in same 'Rule' with ${this._getRuleFilterDesc(rule)}`;
return errors.InvalidRequest.customizeDescription(msg);
}
return null;
}
/**
* Ensure that transition rules are at least a day apart from each other.
* @param {object} params - Given function parameters
* @param {string} [params.days] - The days of the current transition
* @param {string} [params.date] - The date of the current transition
* @param {string} params.storageClass - The storage class of the current
* rule
* @param {string} params.rule - The current rule
* @return {undefined}
*/
_checkTimeGap(params) {
const { days, date, storageClass, rule } = params;
const invalidTransition = rule.Transition.find(transition => {
if (storageClass === transition.StorageClass[0]) {
return false;
}
if (days !== undefined) {
return Number.parseInt(transition.Days[0], 10) === days;
}
if (date !== undefined) {
const timestamp = new Date(date).getTime();
const compareTimestamp = new Date(transition.Date[0]).getTime();
const oneDay = 24 * 60 * 60 * 1000; // Milliseconds in a day.
return Math.abs(timestamp - compareTimestamp) < oneDay;
}
return false;
});
if (invalidTransition) {
const timeType = days !== undefined ? 'Days' : 'Date';
const filterMsg = this._getRuleFilterDesc(rule);
const compareStorageClass = invalidTransition.StorageClass[0];
const msg = `'${timeType}' in the 'Transition' action for ` +
`StorageClass '${storageClass}' for ${filterMsg} must be at ` +
`least one day apart from ${filterMsg} in the 'Transition' ` +
`action for StorageClass '${compareStorageClass}'`;
return errors.InvalidArgument.customizeDescription(msg);
}
return undefined;
}
/**
* Checks transition time type (i.e. 'Date' or 'Days') only occurs once
     * across transitions, and between transitions and expiration policies
* @param {object} params - Given function parameters
* @param {string} params.usedTimeType - The time type that has been used by
* another rule
* @param {string} params.currentTimeType - the time type used by the
* current rule
* @param {string} params.rule - The current rule
* @return {object|null} Returns an error object or `null`
*/
_checkTimeType(params) {
const { usedTimeType, currentTimeType, rule } = params;
if (usedTimeType && usedTimeType !== currentTimeType) {
const msg = "Found mixed 'Date' and 'Days' based Transition " +
'actions in lifecycle rule for ' +
`${this._getRuleFilterDesc(rule)}`;
return errors.InvalidRequest.customizeDescription(msg);
}
// Transition time type cannot differ from the expiration, if provided.
if (rule.Expiration &&
rule.Expiration[0][currentTimeType] === undefined) {
const msg = "Found mixed 'Date' and 'Days' based Expiration and " +
'Transition actions in lifecycle rule for ' +
`${this._getRuleFilterDesc(rule)}`;
return errors.InvalidRequest.customizeDescription(msg);
}
return null;
}
/**
* Checks the validity of the given date
     * @param {string} date - The date to check
* @return {object|null} Returns an error object or `null`
*/
_checkDate(date) {
const isoRegex = new RegExp('^(-?(?:[1-9][0-9]*)?[0-9]{4})-' +
'(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9])' +
':([0-5][0-9]):([0-5][0-9])(.[0-9]+)?(Z)?$');
if (!isoRegex.test(date)) {
const msg = 'Date must be in ISO 8601 format';
return errors.InvalidArgument.customizeDescription(msg);
}
return null;
}
/**
* Parses the NonCurrentVersionTransition value
* @param {object} rule - Rule object from Rule array from this._parsedXml
* @return {object} - Contains error if parsing failed, otherwise contains
* the parsed nonCurrentVersionTransition array
*
* Format of result:
* result = {
* error: <error>,
* nonCurrentVersionTransition: [
* {
* noncurrentDays: <non-current-days>,
* storageClass: <storage-class>,
* },
* ...
* ]
* }
*/
_parseNoncurrentVersionTransition(rule) {
const nonCurrentVersionTransition = [];
const usedStorageClasses = [];
for (let i = 0; i < rule.NoncurrentVersionTransition.length; i++) {
const t = rule.NoncurrentVersionTransition[i]; // Transition object
const noncurrentDays =
t.NoncurrentDays && Number.parseInt(t.NoncurrentDays[0], 10);
const storageClass = t.StorageClass && t.StorageClass[0];
if (noncurrentDays === undefined || storageClass === undefined) {
return { error: errors.MalformedXML };
}
let error = this._checkDays({
days: noncurrentDays,
field: 'NoncurrentDays',
ancestor: 'NoncurrentVersionTransition',
});
if (error) {
return { error };
}
error = this._checkStorageClasses({
storageClass,
usedStorageClasses,
ancestor: 'NoncurrentVersionTransition',
rule,
});
if (error) {
return { error };
}
nonCurrentVersionTransition.push({ noncurrentDays, storageClass });
usedStorageClasses.push(storageClass);
}
return { nonCurrentVersionTransition };
}
/**
* Parses the Transition value
* @param {object} rule - Rule object from Rule array from this._parsedXml
* @return {object} - Contains error if parsing failed, otherwise contains
* the parsed transition array
*
* Format of result:
* result = {
* error: <error>,
* transition: [
* {
* days: <days>,
* date: <date>,
* storageClass: <storage-class>,
* },
* ...
* ]
* }
*/
_parseTransition(rule) {
const transition = [];
const usedStorageClasses = [];
let usedTimeType = null;
for (let i = 0; i < rule.Transition.length; i++) {
const t = rule.Transition[i]; // Transition object
const days = t.Days && Number.parseInt(t.Days[0], 10);
const date = t.Date && t.Date[0];
const storageClass = t.StorageClass && t.StorageClass[0];
if ((days === undefined && date === undefined) ||
(days !== undefined && date !== undefined) ||
(storageClass === undefined)) {
return { error: errors.MalformedXML };
}
let error = this._checkStorageClasses({
storageClass,
usedStorageClasses,
ancestor: 'Transition',
rule,
});
if (error) {
return { error };
}
usedStorageClasses.push(storageClass);
if (days !== undefined) {
error = this._checkTimeType({
usedTimeType,
currentTimeType: 'Days',
rule,
});
if (error) {
return { error };
}
usedTimeType = 'Days';
error = this._checkDays({
days,
field: 'Days',
ancestor: 'Transition',
});
if (error) {
return { error };
}
transition.push({ days, storageClass });
}
if (date !== undefined) {
error = this._checkTimeType({
usedTimeType,
currentTimeType: 'Date',
rule,
});
if (error) {
return { error };
}
usedTimeType = 'Date';
error = this._checkDate(date);
if (error) {
return { error };
}
transition.push({ date, storageClass });
}
error = this._checkTimeGap({ days, date, storageClass, rule });
if (error) {
return { error };
}
}
return { transition };
}
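For reference, a sketch of the shape `_parseTransition` consumed and produced, using an invented rule shaped the way the XML parser delivers it (every value array-wrapped); the storage class names are assumed to be configured:

```javascript
// Illustrative rule object, as parsed from a lifecycle XML document:
const rule = {
    Transition: [
        { Days: ['30'], StorageClass: ['site-a'] },
        { Days: ['60'], StorageClass: ['site-b'] },
    ],
};
// Assuming 'site-a' and 'site-b' are valid storage classes, the method
// would return:
// { transition: [{ days: 30, storageClass: 'site-a' },
//                { days: 60, storageClass: 'site-b' }] }
```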
    /**
     * Check that action component of rule is valid
     * @param {object} rule - a rule object from Rule array from this._parsedXml
@@ -807,13 +492,8 @@ class LifecycleConfiguration {
        const actionsObj = {};
        actionsObj.propName = 'actions';
        actionsObj.actions = [];
-        const validActions = [
-            'AbortIncompleteMultipartUpload',
-            'Expiration',
-            'NoncurrentVersionExpiration',
-            'NoncurrentVersionTransition',
-            'Transition',
-        ];
+        const validActions = ['AbortIncompleteMultipartUpload',
+            'Expiration', 'NoncurrentVersionExpiration'];
        validActions.forEach(a => {
            if (rule[a]) {
                actionsObj.actions.push({ actionName: `${a}` });
@@ -830,8 +510,7 @@
            if (action.error) {
                actionsObj.error = action.error;
            } else {
-                const actionTimes = ['days', 'date', 'deleteMarker',
-                    'transition', 'nonCurrentVersionTransition'];
+                const actionTimes = ['days', 'date', 'deleteMarker'];
                actionTimes.forEach(t => {
                    if (action[t]) {
                        // eslint-disable-next-line no-param-reassign
@@ -918,9 +597,12 @@
            return expObj;
        }
        if (subExp.Date) {
-            const error = this._checkDate(subExp.Date[0]);
-            if (error) {
-                expObj.error = error;
+            const isoRegex = new RegExp('^(-?(?:[1-9][0-9]*)?[0-9]{4})-' +
+                '(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9])' +
+                ':([0-5][0-9]):([0-5][0-9])(.[0-9]+)?(Z)?$');
+            if (!isoRegex.test(subExp.Date[0])) {
+                expObj.error = errors.InvalidArgument.customizeDescription(
+                    'Date must be in ISO 8601 format');
            } else {
                expObj.date = subExp.Date[0];
            }
@@ -1032,26 +714,6 @@
            if (a.deleteMarker) {
                assert.strictEqual(typeof a.deleteMarker, 'string');
            }
if (a.nonCurrentVersionTransition) {
assert.strictEqual(
typeof a.nonCurrentVersionTransition, 'object');
a.nonCurrentVersionTransition.forEach(t => {
assert.strictEqual(typeof t.noncurrentDays, 'number');
assert.strictEqual(typeof t.storageClass, 'string');
});
}
if (a.transition) {
assert.strictEqual(typeof a.transition, 'object');
a.transition.forEach(t => {
if (t.days || t.days === 0) {
assert.strictEqual(typeof t.days, 'number');
}
if (t.date !== undefined) {
assert.strictEqual(typeof t.date, 'string');
}
assert.strictEqual(typeof t.storageClass, 'string');
});
}
            });
        });
    }
@@ -1101,8 +763,7 @@
        }
        const Actions = actions.map(action => {
-            const { actionName, days, date, deleteMarker,
-                nonCurrentVersionTransition, transition } = action;
+            const { actionName, days, date, deleteMarker } = action;
            let Action;
            if (actionName === 'AbortIncompleteMultipartUpload') {
                Action = `<${actionName}><DaysAfterInitiation>${days}` +
@@ -1119,40 +780,6 @@
                Action = `<${actionName}>${Days}${Date}${DelMarker}` +
                    `</${actionName}>`;
            }
if (actionName === 'NoncurrentVersionTransition') {
const xml = [];
nonCurrentVersionTransition.forEach(transition => {
const { noncurrentDays, storageClass } = transition;
xml.push(
`<${actionName}>`,
`<NoncurrentDays>${noncurrentDays}` +
'</NoncurrentDays>',
`<StorageClass>${storageClass}</StorageClass>`,
`</${actionName}>`,
);
});
Action = xml.join('');
}
if (actionName === 'Transition') {
const xml = [];
transition.forEach(transition => {
const { days, date, storageClass } = transition;
let element;
if (days !== undefined) {
element = `<Days>${days}</Days>`;
}
if (date !== undefined) {
element = `<Date>${date}</Date>`;
}
xml.push(
`<${actionName}>`,
element,
`<StorageClass>${storageClass}</StorageClass>`,
`</${actionName}>`,
);
});
Action = xml.join('');
}
            return Action;
        }).join('');
        return `<Rule>${ID}${Status}${Filter}${Actions}</Rule>`;

@@ -1,5 +1,3 @@
-const crypto = require('crypto');
const constants = require('../constants');
const VersionIDUtils = require('../versioning/VersionID');
@@ -10,6 +8,7 @@ const ObjectMDLocation = require('./ObjectMDLocation');
 * mpuPart metadata for example)
 */
class ObjectMD {
    /**
     * Create a new instance of ObjectMD. Parameter <tt>objMd</tt> is
     * reserved for internal use, users should call
@@ -29,14 +28,9 @@
            } else {
                this._updateFromParsedJSON(objMd);
            }
-            if (!this._data['creation-time']) {
-                this.setCreationTime(this.getLastModified());
-            }
        } else {
            // set newly-created object md modified time to current time
-            const dt = new Date().toJSON();
-            this.setLastModified(dt);
-            this.setCreationTime(dt);
+            this._data['last-modified'] = new Date().toJSON();
        }
        // set latest md model version now that we ensured
        // backward-compat conversion
@@ -91,8 +85,6 @@
            'content-length': 0,
            'content-type': '',
            'content-md5': '',
-            'content-language': '',
-            'creation-time': undefined,
            // simple/no version. will expand once object versioning is
            // introduced
            'x-amz-version-id': 'null',
@@ -114,7 +106,6 @@
            },
            'key': '',
            'location': null,
-            'azureInfo': undefined,
            // versionId, isNull, nullVersionId and isDeleteMarker
            // should be undefined when not set explicitly
            'isNull': undefined,
@@ -133,7 +124,6 @@
                role: '',
                storageType: '',
                dataStoreVersionId: '',
-                isNFS: null,
            },
            'dataStoreName': '',
            'originOp': '',
@@ -366,50 +356,6 @@
        return this._data['content-md5'];
    }
/**
* Set content-language
*
* @param {string} contentLanguage - content-language
* @return {ObjectMD} itself
*/
setContentLanguage(contentLanguage) {
this._data['content-language'] = contentLanguage;
return this;
}
/**
* Returns content-language
*
* @return {string} content-language
*/
getContentLanguage() {
return this._data['content-language'];
}
/**
* Set Creation Date
*
* @param {string} creationTime - Creation Date
* @return {ObjectMD} itself
*/
setCreationTime(creationTime) {
this._data['creation-time'] = creationTime;
return this;
}
/**
* Returns Creation Date
*
* @return {string} Creation Date
*/
getCreationTime() {
// If creation-time is not set fallback to LastModified
if (!this._data['creation-time']) {
return this.getLastModified();
}
return this._data['creation-time'];
}
    /**
     * Set version id
     *
@@ -653,29 +599,6 @@
        return reducedLocations;
    }
/**
* Set the Azure specific information
* @param {object} azureInfo - a plain JS structure representing the
* Azure specific information for a Blob or a Container (see constructor
* of {@link ObjectMDAzureInfo} for a description of the fields of this
     * structure)
* @return {ObjectMD} itself
*/
setAzureInfo(azureInfo) {
this._data.azureInfo = azureInfo;
return this;
}
/**
* Get the Azure specific information
* @return {object} a plain JS structure representing the Azure specific
     * information for a Blob or a Container and suitable for the constructor
* of {@link ObjectMDAzureInfo}.
*/
getAzureInfo() {
return this._data.azureInfo;
}
    /**
     * Set metadata isNull value
     *
@@ -757,19 +680,6 @@
        return this._data.isDeleteMarker || false;
    }
/**
* Get if the object is a multipart upload (MPU)
*
* The function checks the "content-md5" field: if it contains a
* dash ('-') it is a MPU, as the content-md5 string ends with
* "-[nbparts]" for MPUs.
*
* @return {boolean} Whether object is a multipart upload
*/
isMultipartUpload() {
return this.getContentMd5().includes('-');
}
    /**
     * Set metadata versionId value
     *
@@ -797,11 +707,8 @@
     * @return {string|undefined} The encoded object versionId
     */
    getEncodedVersionId() {
-        if (this.getVersionId()) {
-            return VersionIDUtils.encode(this.getVersionId());
-        }
-        return undefined;
+        return VersionIDUtils.encode(this.getVersionId());
    }
    /**
     * Set metadata uploadId value
@@ -843,20 +750,6 @@
        return this._data.tags;
    }
getUserMetadata() {
const metaHeaders = {};
const data = this.getValue();
Object.keys(data).forEach(key => {
if (key.startsWith('x-amz-meta-')) {
metaHeaders[key] = data[key];
}
});
if (Object.keys(metaHeaders).length > 0) {
return JSON.stringify(metaHeaders);
}
return undefined;
}
    /**
     * Set replication information
     *
@@ -865,7 +758,7 @@
     */
    setReplicationInfo(replicationInfo) {
        const { status, backends, content, destination, storageClass, role,
-            storageType, dataStoreVersionId, isNFS } = replicationInfo;
+            storageType, dataStoreVersionId } = replicationInfo;
        this._data.replicationInfo = {
            status,
            backends,
@@ -875,7 +768,6 @@
            role,
            storageType: storageType || '',
            dataStoreVersionId: dataStoreVersionId || '',
-            isNFS: isNFS || null,
        };
        return this;
    }
@@ -894,24 +786,6 @@
        return this;
    }
/**
* Set whether the replication is occurring from an NFS bucket.
* @param {Boolean} isNFS - Whether replication from an NFS bucket
* @return {ObjectMD} itself
*/
setReplicationIsNFS(isNFS) {
this._data.replicationInfo.isNFS = isNFS;
return this;
}
/**
* Get whether the replication is occurring from an NFS bucket.
* @return {Boolean} Whether replication from an NFS bucket
*/
getReplicationIsNFS() {
return this._data.replicationInfo.isNFS;
}
    setReplicationSiteStatus(site, status) {
        const backend = this._data.replicationInfo.backends
            .find(o => o.site === site);
@@ -958,11 +832,6 @@
        return this;
    }
setReplicationStorageType(storageType) {
this._data.replicationInfo.storageType = storageType;
return this;
}
    setReplicationStorageClass(storageClass) {
        this._data.replicationInfo.storageClass = storageClass;
        return this;
@@ -1044,9 +913,6 @@
        Object.keys(metaHeaders).forEach(key => {
            if (key.startsWith('x-amz-meta-')) {
                this._data[key] = metaHeaders[key];
-            } else if (key.startsWith('x-ms-meta-')) {
-                const _key = key.replace('x-ms-meta-', 'x-amz-meta-');
-                this._data[_key] = metaHeaders[key];
            }
        });
        // If a multipart object and the acl is already parsed, we update it
@@ -1056,20 +922,6 @@
        return this;
    }
/**
* Clear all existing meta headers (used for Azure)
*
* @return {ObjectMD} itself
*/
clearMetadataValues() {
Object.keys(this._data).forEach(key => {
if (key.startsWith('x-amz-meta')) {
delete this._data[key];
}
});
return this;
}
    /**
     * overrideMetadataValues (used for complete MPU and object copy)
     *
@@ -1081,39 +933,6 @@
        return this;
    }
/**
* Create or update the microVersionId field
*
* This field can be used to force an update in MongoDB. This can
* be needed in the following cases:
*
* - in case no other metadata field changes
*
* - to detect a change when fields change but object version does
* not change e.g. when ingesting a putObjectTagging coming from
* S3C to Zenko
*
* - to manage conflicts during concurrent updates, using
* conditions on the microVersionId field.
*
* It's a field of 16 hexadecimal characters randomly generated
*
* @return {ObjectMD} itself
*/
updateMicroVersionId() {
this._data.microVersionId = crypto.randomBytes(8).toString('hex');
}
/**
* Get the microVersionId field, or null if not set
*
* @return {string|null} the microVersionId field if exists, or
* {null} if it does not exist
*/
getMicroVersionId() {
return this._data.microVersionId || null;
}
    /**
     * Set object legal hold status
     * @param {boolean} legalHold - true if legal hold is 'ON' false if 'OFF'

@@ -1,162 +0,0 @@
/**
* Helper class to ease access to the Azure specific information for
* Blob and Container objects.
*/
class ObjectMDAzureInfo {
/**
* @constructor
* @param {object} obj - Raw structure for the Azure info on Blob/Container
* @param {string} obj.containerPublicAccess - Public access authorization
* type
* @param {object[]} obj.containerStoredAccessPolicies - Access policies
* for Shared Access Signature bearer
* @param {object} obj.containerImmutabilityPolicy - data immutability
* policy for this container
* @param {boolean} obj.containerLegalHoldStatus - legal hold status for
* this container
* @param {boolean} obj.containerDeletionInProgress - deletion in progress
* indicator for this container
* @param {string} obj.blobType - defines the type of blob for this object
* @param {string} obj.blobContentMD5 - whole object MD5 sum set by the
* client through the Azure API
* @param {string} obj.blobIssuedETag - backup of the issued ETag on MD only
* operations like Set Blob Properties and Set Blob Metadata
* @param {object} obj.blobCopyInfo - information pertaining to past and
 * pending copy operations targeting this object
* @param {number} obj.blobSequenceNumber - sequence number for a PageBlob
* @param {Date} obj.blobAccessTierChangeTime - date of change of tier
* @param {boolean} obj.blobUncommitted - A block has been put for a
* nonexistent blob which is about to be created
*/
constructor(obj) {
this._data = {
containerPublicAccess: obj.containerPublicAccess,
containerStoredAccessPolicies: obj.containerStoredAccessPolicies,
containerImmutabilityPolicy: obj.containerImmutabilityPolicy,
containerLegalHoldStatus: obj.containerLegalHoldStatus,
containerDeletionInProgress: obj.containerDeletionInProgress,
blobType: obj.blobType,
blobContentMD5: obj.blobContentMD5,
blobIssuedETag: obj.blobIssuedETag,
blobCopyInfo: obj.blobCopyInfo,
blobSequenceNumber: obj.blobSequenceNumber,
blobAccessTierChangeTime: obj.blobAccessTierChangeTime,
blobUncommitted: obj.blobUncommitted,
};
}
getContainerPublicAccess() {
return this._data.containerPublicAccess;
}
setContainerPublicAccess(containerPublicAccess) {
this._data.containerPublicAccess = containerPublicAccess;
return this;
}
getContainerStoredAccessPolicies() {
return this._data.containerStoredAccessPolicies;
}
setContainerStoredAccessPolicies(containerStoredAccessPolicies) {
this._data.containerStoredAccessPolicies =
containerStoredAccessPolicies;
return this;
}
getContainerImmutabilityPolicy() {
return this._data.containerImmutabilityPolicy;
}
setContainerImmutabilityPolicy(containerImmutabilityPolicy) {
this._data.containerImmutabilityPolicy = containerImmutabilityPolicy;
return this;
}
getContainerLegalHoldStatus() {
return this._data.containerLegalHoldStatus;
}
setContainerLegalHoldStatus(containerLegalHoldStatus) {
this._data.containerLegalHoldStatus = containerLegalHoldStatus;
return this;
}
getContainerDeletionInProgress() {
return this._data.containerDeletionInProgress;
}
setContainerDeletionInProgress(containerDeletionInProgress) {
this._data.containerDeletionInProgress = containerDeletionInProgress;
return this;
}
getBlobType() {
return this._data.blobType;
}
setBlobType(blobType) {
this._data.blobType = blobType;
return this;
}
getBlobContentMD5() {
return this._data.blobContentMD5;
}
setBlobContentMD5(blobContentMD5) {
this._data.blobContentMD5 = blobContentMD5;
return this;
}
getBlobIssuedETag() {
return this._data.blobIssuedETag;
}
setBlobIssuedETag(blobIssuedETag) {
this._data.blobIssuedETag = blobIssuedETag;
return this;
}
getBlobCopyInfo() {
return this._data.blobCopyInfo;
}
setBlobCopyInfo(blobCopyInfo) {
this._data.blobCopyInfo = blobCopyInfo;
return this;
}
getBlobSequenceNumber() {
return this._data.blobSequenceNumber;
}
setBlobSequenceNumber(blobSequenceNumber) {
this._data.blobSequenceNumber = blobSequenceNumber;
return this;
}
getBlobAccessTierChangeTime() {
return this._data.blobAccessTierChangeTime;
}
setBlobAccessTierChangeTime(blobAccessTierChangeTime) {
this._data.blobAccessTierChangeTime = blobAccessTierChangeTime;
return this;
}
getBlobUncommitted() {
return this._data.blobUncommitted;
}
setBlobUncommitted(blobUncommitted) {
this._data.blobUncommitted = blobUncommitted;
return this;
}
getValue() {
return this._data;
}
}
module.exports = ObjectMDAzureInfo;
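A minimal usage sketch of the removed helper (all values invented):

```javascript
// Illustrative: same fluent style as BucketAzureInfo above.
const info = new ObjectMDAzureInfo({
    blobType: 'BlockBlob',
    blobContentMD5: '1B2M2Y8AsgTpgAmY7PhCfg==',
});
info.setBlobSequenceNumber(0).setBlobUncommitted(false);
const raw = info.getValue(); // plain object for ObjectMD.setAzureInfo()
```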

@@ -3,6 +3,7 @@
 * 'location' array
 */
class ObjectMDLocation {
    /**
     * @constructor
     * @param {object} locationObj - single data location info
@@ -13,14 +14,10 @@
     * @param {string} locationObj.dataStoreName - type of data store
     * @param {string} locationObj.dataStoreETag - internal ETag of
     * data part
-     * @param {string} [locationObj.dataStoreVersionId] - versionId,
-     * needed for cloud backends
     * @param {number} [location.cryptoScheme] - if location data is
     * encrypted: the encryption scheme version
     * @param {string} [location.cipheredDataKey] - if location data
     * is encrypted: the base64-encoded ciphered data key
-     * @param {string} [locationObj.blockId] - blockId of the part,
-     * set by the Azure Blob Service REST API frontend
     */
    constructor(locationObj) {
        this._data = {
@@ -29,8 +26,6 @@
            size: locationObj.size,
            dataStoreName: locationObj.dataStoreName,
            dataStoreETag: locationObj.dataStoreETag,
-            dataStoreVersionId: locationObj.dataStoreVersionId,
-            blockId: locationObj.blockId,
        };
        if (locationObj.cryptoScheme) {
            this._data.cryptoScheme = locationObj.cryptoScheme;
@@ -52,7 +47,6 @@
     * @param {object} location - single data location info
     * @param {string} location.key - data backend key
     * @param {string} location.dataStoreName - type of data store
-     * @param {string} [location.dataStoreVersionId] - data backend version ID
     * @param {number} [location.cryptoScheme] - if location data is
     * encrypted: the encryption scheme version
     * @param {string} [location.cipheredDataKey] - if location data
@@ -63,7 +57,6 @@
        [
            'key',
            'dataStoreName',
-            'dataStoreVersionId',
            'cryptoScheme',
            'cipheredDataKey',
        ].forEach(attrName => {
@@ -80,10 +73,6 @@
        return this._data.dataStoreETag;
    }
-    getDataStoreVersionId() {
-        return this._data.dataStoreVersionId;
-    }
    getPartNumber() {
        return Number.parseInt(this._data.dataStoreETag.split(':')[0], 10);
    }
@@ -118,15 +107,6 @@
        return this._data.cipheredDataKey;
    }
-    getBlockId() {
-        return this._data.blockId;
-    }
-    setBlockId(blockId) {
-        this._data.blockId = blockId;
-        return this;
-    }
    getValue() {
        return this._data;
    }

@@ -59,7 +59,6 @@ class ReplicationConfiguration {
        this._rules = null;
        this._prevStorageClass = null;
        this._hasScalityDestination = null;
-        this._preferredReadLocation = null;
    }
    /**
@@ -86,18 +85,6 @@
        return this._rules;
    }
/**
* The preferred read location
* @return {string|null} - The preferred read location if defined,
* otherwise null
*
* FIXME ideally we should be able to specify one preferred read
* location for each rule
*/
getPreferredReadLocation() {
return this._preferredReadLocation;
}
    /**
     * Get the replication configuration
     * @return {object} - The replication configuration
@@ -107,7 +94,6 @@
            role: this.getRole(),
            destination: this.getDestination(),
            rules: this.getRules(),
-            preferredReadLocation: this.getPreferredReadLocation(),
        };
    }
@@ -306,14 +292,6 @@
            return undefined;
        }
        const storageClasses = destination.StorageClass[0].split(',');
const prefReadIndex = storageClasses.findIndex(storageClass =>
storageClass.endsWith(':preferred_read'));
if (prefReadIndex !== -1) {
const prefRead = storageClasses[prefReadIndex].split(':')[0];
// remove :preferred_read tag from storage class name
storageClasses[prefReadIndex] = prefRead;
this._preferredReadLocation = prefRead;
}
        const isValidStorageClass = storageClasses.every(storageClass => {
            if (validStorageClasses.includes(storageClass)) {
                this._hasScalityDestination =

@@ -10,6 +10,7 @@ const { checkSupportIPv6 } = require('./utils');
class Server {
    /**
     * @constructor
     *
@@ -368,8 +369,6 @@
            error: err.stack || err,
            address: sock.address(),
        });
-        // socket is not systematically destroyed
-        sock.destroy();
    }
/** /**

@@ -342,6 +342,8 @@ class KMIP {
            return cb(null, response);
        });
    }
}

@@ -1,76 +0,0 @@
const httpServer = require('../http/server');
const werelogs = require('werelogs');
const errors = require('../../errors');
const ZenkoMetrics = require('../../metrics/ZenkoMetrics');
const { sendSuccess, sendError } = require('./Utils');
function checkStub(log) { // eslint-disable-line
return true;
}
class HealthProbeServer extends httpServer {
constructor(params) {
const logging = new werelogs.Logger('HealthProbeServer');
super(params.port, logging);
this.logging = logging;
this.setBindAddress(params.bindAddress || 'localhost');
// hooking our request processing function by calling the
// parent's method for that
this.onRequest(this._onRequest);
this._reqHandlers = {
'/_/health/liveness': this._onLiveness.bind(this),
'/_/health/readiness': this._onReadiness.bind(this),
'/_/monitoring/metrics': this._onMetrics.bind(this),
};
this._livenessCheck = params.livenessCheck || checkStub;
this._readinessCheck = params.readinessCheck || checkStub;
}
onLiveCheck(f) {
this._livenessCheck = f;
}
onReadyCheck(f) {
this._readinessCheck = f;
}
_onRequest(req, res) {
const log = this.logging.newRequestLogger();
log.debug('request received', { method: req.method,
url: req.url });
if (req.method !== 'GET') {
sendError(res, log, errors.MethodNotAllowed);
} else if (req.url in this._reqHandlers) {
this._reqHandlers[req.url](req, res, log);
} else {
sendError(res, log, errors.InvalidURI);
}
}
_onLiveness(req, res, log) {
if (this._livenessCheck(log)) {
sendSuccess(res, log);
} else {
sendError(res, log, errors.ServiceUnavailable);
}
}
_onReadiness(req, res, log) {
if (this._readinessCheck(log)) {
sendSuccess(res, log);
} else {
sendError(res, log, errors.ServiceUnavailable);
}
}
// expose metrics to Prometheus
_onMetrics(req, res) {
res.writeHead(200, {
'Content-Type': ZenkoMetrics.asPrometheusContentType(),
});
res.end(ZenkoMetrics.asPrometheus());
}
}
module.exports = HealthProbeServer;
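Before its removal in favor of `ProbeServer`, this server was wired up roughly as below; `myService` is hypothetical, and `start()` is assumed to be inherited from the underlying `httpServer`:

```javascript
// Illustrative wiring: checks receive a werelogs logger and return a
// boolean; a false return maps to a 503 ServiceUnavailable response.
const probeServer = new HealthProbeServer({ port: 8000 });
probeServer.onLiveCheck(log => myService.isUp());
probeServer.onReadyCheck(log => myService.isReady());
probeServer.start();
```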

@@ -3,17 +3,19 @@ const werelogs = require('werelogs');
const errors = require('../../errors');
const DEFAULT_LIVE_ROUTE = '/_/live';
-const DEFAULT_READY_ROUTE = '/_/ready';
-const DEFAULT_METRICS_ROUTE = '/metrics';
+const DEFAULT_READY_ROUTE = '/_/live';
+const DEFAULT_METRICS_ROUTE = '/_/metrics';
/**
- * ProbeDelegate is used to handle probe checks.
- * You can sendSuccess and sendError from Utils to handle success
- * and failure conditions.
+ * ProbeDelegate is used to determine if a probe is successful or
+ * if any errors are present.
+ * If everything is working as intended, it is a no-op.
+ * Otherwise, return a string representing what is failing.
 * @callback ProbeDelegate
 * @param { import('http').ServerResponse } res - HTTP response for writing
 * @param {werelogs.Logger} log - Werelogs instance for logging if you choose to
- * @return {undefined}
+ * @return {(string|undefined)} String representing issues to report. An empty
+ * string or undefined is used to represent no issues.
 */
/**
@@ -89,7 +91,13 @@
            return;
        }
-        this._handlers.get(req.url)(res, log);
+        const probeResponse = this._handlers.get(req.url)(res, log);
+        if (probeResponse !== undefined && probeResponse !== '') {
+            // Return an internal error with the response
+            errors.InternalError
+                .customizeDescription(probeResponse)
+                .writeResponse(res);
+        }
    }
}
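Under the new contract a delegate reports trouble by returning a string; a hypothetical delegate (`myService` is invented for illustration):

```javascript
// Illustrative ProbeDelegate: write the response yourself on success;
// return a non-empty string and _onRequest above answers with an
// InternalError carrying that description instead.
function onReadiness(res, log) {
    if (!myService.isReady()) {
        return 'service not ready';
    }
    res.writeHead(200);
    res.end();
    return undefined;
}
```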

@@ -1,41 +0,0 @@
/**
* Send a successful HTTP response of 200 OK
* @param {http.ServerResponse} res - HTTP response for writing
* @param {werelogs.Logger} log - Werelogs instance for logging if you choose to
* @param {string} [message] - Message to send as response, defaults to OK
* @returns {undefined}
*/
function sendSuccess(res, log, message = 'OK') {
log.debug('replying with success');
res.writeHead(200);
res.end(message);
}
/**
* Send an Arsenal Error response
* @param {http.ServerResponse} res - HTTP response for writing
* @param {werelogs.Logger} log - Werelogs instance for logging if you choose to
* @param {ArsenalError} error - Error to send back to the user
* @param {string} [optMessage] - Message to use instead of the errors message
* @returns {undefined}
*/
function sendError(res, log, error, optMessage) {
const message = optMessage || error.description || '';
log.debug('sending back error response',
{
httpCode: error.code,
errorType: error.message,
error: message,
},
);
res.writeHead(error.code);
res.end(JSON.stringify({
errorType: error.message,
errorMessage: message,
}));
}
module.exports = {
sendSuccess,
sendError,
};
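These helpers were typically called from probe handlers, as in this sketch (`backendIsUp` is a hypothetical condition):

```javascript
// Illustrative handler: 200 OK on success, otherwise an Arsenal error
// serialized onto the response with an optional custom message.
function onHealthCheck(res, log) {
    if (backendIsUp()) {
        sendSuccess(res, log);
    } else {
        sendError(res, log, errors.ServiceUnavailable, 'backend unreachable');
    }
}
```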

@ -81,7 +81,6 @@ class RESTClient {
this.host = params.host; this.host = params.host;
this.port = params.port; this.port = params.port;
this.isPassthrough = params.isPassthrough || false;
this.setupLogging(params.logApi); this.setupLogging(params.logApi);
this.httpAgent = new HttpAgent({ this.httpAgent = new HttpAgent({
keepAlive: true, keepAlive: true,
@ -120,13 +119,11 @@ class RESTClient {
doRequest(method, headers, key, log, responseCb) { doRequest(method, headers, key, log, responseCb) {
const reqHeaders = headers || {}; const reqHeaders = headers || {};
const urlKey = key || ''; const urlKey = key || '';
const prefix = this.isPassthrough ?
constants.passthroughFileURL : constants.dataFileURL;
const reqParams = { const reqParams = {
hostname: this.host, hostname: this.host,
port: this.port, port: this.port,
method, method,
path: encodeURI(`${prefix}/${urlKey}`), path: `${constants.dataFileURL}/${urlKey}`,
headers: reqHeaders, headers: reqHeaders,
agent: this.httpAgent, agent: this.httpAgent,
}; };
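The practical effect of this hunk is the request path prefix. A runnable sketch of both sides' path construction (the constant values are assumptions; the excerpt only names the constants):

```javascript
// Assumed values for Arsenal's constants.dataFileURL/passthroughFileURL.
const constants = { dataFileURL: '/DataFile', passthroughFileURL: '/PassthroughFile' };
const urlKey = '6d2f5e0a';

// Left side: the prefix depends on isPassthrough, and the path is URI-encoded.
const isPassthrough = true;
const prefix = isPassthrough ? constants.passthroughFileURL : constants.dataFileURL;
console.log(encodeURI(`${prefix}/${urlKey}`)); // '/PassthroughFile/6d2f5e0a'

// Right side: always the data-file service, no encoding step.
console.log(`${constants.dataFileURL}/${urlKey}`); // '/DataFile/6d2f5e0a'
```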

View File

@ -7,7 +7,7 @@ const werelogs = require('werelogs');
const httpServer = require('../http/server'); const httpServer = require('../http/server');
const constants = require('../../constants'); const constants = require('../../constants');
const { parseURL } = require('./utils'); const utils = require('./utils');
const httpUtils = require('../http/utils'); const httpUtils = require('../http/utils');
const errors = require('../../errors'); const errors = require('../../errors');
@ -37,6 +37,42 @@ function sendError(res, log, error, optMessage) {
errorMessage: message })}\n`); errorMessage: message })}\n`);
} }
/**
* Parse the given url and return a pathInfo object. Sanity checks are
* performed.
*
* @param {String} urlStr - URL to parse
* @param {Boolean} expectKey - whether the command expects to see a
* key in the URL
* @return {Object} a pathInfo object with URL items containing the
* following attributes:
* - pathInfo.service {String} - The name of REST service ("DataFile")
* - pathInfo.key {String} - The requested key
*/
function parseURL(urlStr, expectKey) {
const urlObj = url.parse(urlStr);
const pathInfo = utils.explodePath(urlObj.path);
if (pathInfo.service !== constants.dataFileURL) {
throw errors.InvalidAction.customizeDescription(
`unsupported service '${pathInfo.service}'`);
}
if (expectKey && pathInfo.key === undefined) {
throw errors.MissingParameter.customizeDescription(
'URL is missing key');
}
if (!expectKey && pathInfo.key !== undefined) {
// note: we may implement rewrite functionality by allowing a
// key in the URL, though we may still provide the new key in
// the Location header to keep immutability property and
// atomicity of the update (we would just remove the old
// object when the new one has been written entirely in this
// case, saving a request over an equivalent PUT + DELETE).
throw errors.InvalidURI.customizeDescription(
'PUT url cannot contain a key');
}
return pathInfo;
}
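Evaluated inside this module (so `parseURL` is in scope), the sanity checks above behave as follows; the service value is assumed to be '/DataFile':

```javascript
// Outcomes of the checks above (keys must be lowercase hex).
parseURL('/DataFile/0123abcd', true);  // { service: '/DataFile', key: '0123abcd' }
parseURL('/DataFile', false);          // { service: '/DataFile', key: undefined }
parseURL('/DataFile', true);           // throws MissingParameter ('URL is missing key')
parseURL('/DataFile/0123abcd', false); // throws InvalidURI ('PUT url cannot contain a key')
parseURL('/Other/0123abcd', true);     // throws InvalidAction (unsupported service)
```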
/** /**
* @class * @class
* @classdesc REST Server interface * @classdesc REST Server interface
@ -45,6 +81,7 @@ function sendError(res, log, error, optMessage) {
* start() to start listening to the configured port. * start() to start listening to the configured port.
*/ */
class RESTServer extends httpServer { class RESTServer extends httpServer {
/** /**
* @constructor * @constructor
* @param {Object} params - constructor params * @param {Object} params - constructor params

View File

@ -1,19 +1,8 @@
'use strict'; // eslint-disable-line 'use strict'; // eslint-disable-line
const errors = require('../../errors'); const errors = require('../../errors');
const constants = require('../../constants');
const url = require('url');
const passthroughPrefixLength = constants.passthroughFileURL.length; module.exports.explodePath = function explodePath(path) {
function explodePath(path) {
if (path.startsWith(constants.passthroughFileURL)) {
const key = path.slice(passthroughPrefixLength + 1);
return {
service: constants.passthroughFileURL,
key: key.length > 0 ? key : undefined,
};
}
const pathMatch = /^(\/[a-zA-Z0-9]+)(\/([0-9a-f]*))?$/.exec(path); const pathMatch = /^(\/[a-zA-Z0-9]+)(\/([0-9a-f]*))?$/.exec(path);
if (pathMatch) { if (pathMatch) {
return { return {
@ -23,46 +12,4 @@ function explodePath(path) {
}; };
} }
throw errors.InvalidURI.customizeDescription('malformed URI'); throw errors.InvalidURI.customizeDescription('malformed URI');
}
/**
* Parse the given url and return a pathInfo object. Sanity checks are
* performed.
*
* @param {String} urlStr - URL to parse
* @param {Boolean} expectKey - whether the command expects to see a
* key in the URL
* @return {Object} a pathInfo object with URL items containing the
* following attributes:
* - pathInfo.service {String} - The name of REST service ("DataFile")
* - pathInfo.key {String} - The requested key
*/
function parseURL(urlStr, expectKey) {
const urlObj = url.parse(urlStr);
const pathInfo = explodePath(decodeURI(urlObj.path));
if ((pathInfo.service !== constants.dataFileURL)
&& (pathInfo.service !== constants.passthroughFileURL)) {
throw errors.InvalidAction.customizeDescription(
`unsupported service '${pathInfo.service}'`);
}
if (expectKey && pathInfo.key === undefined) {
throw errors.MissingParameter.customizeDescription(
'URL is missing key');
}
if (!expectKey && pathInfo.key !== undefined) {
// note: we may implement rewrite functionality by allowing a
// key in the URL, though we may still provide the new key in
// the Location header to keep immutability property and
// atomicity of the update (we would just remove the old
// object when the new one has been written entirely in this
// case, saving a request over an equivalent PUT + DELETE).
throw errors.InvalidURI.customizeDescription(
'PUT url cannot contain a key');
}
return pathInfo;
}
module.exports = {
explodePath,
parseURL,
}; };
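The capture groups of the path regex do the splitting; a runnable sketch:

```javascript
const pathMatch = /^(\/[a-zA-Z0-9]+)(\/([0-9a-f]*))?$/.exec('/DataFile/ab12');
console.log(pathMatch[1]); // '/DataFile' -> pathInfo.service
console.log(pathMatch[3]); // 'ab12'      -> pathInfo.key (lowercase hex only)
// '/DataFile' alone also matches, with groups 2 and 3 undefined,
// which explodePath surfaces as key === undefined.
```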

View File

@ -17,6 +17,7 @@ const rpc = require('./rpc.js');
* RPC client object accessing the sub-level transparently. * RPC client object accessing the sub-level transparently.
*/ */
class LevelDbClient extends rpc.BaseClient { class LevelDbClient extends rpc.BaseClient {
/** /**
* @constructor * @constructor
* *
@ -77,6 +78,7 @@ class LevelDbClient extends rpc.BaseClient {
* env.subDb (env is passed as first parameter of received RPC calls). * env.subDb (env is passed as first parameter of received RPC calls).
*/ */
class LevelDbService extends rpc.BaseService { class LevelDbService extends rpc.BaseService {
/** /**
* @constructor * @constructor
* *

View File

@ -37,6 +37,7 @@ let streamRPCJSONObj;
* an error occurred). * an error occurred).
*/ */
class BaseClient extends EventEmitter { class BaseClient extends EventEmitter {
/** /**
* @constructor * @constructor
* *
@ -250,6 +251,7 @@ class BaseClient extends EventEmitter {
* *
*/ */
class BaseService { class BaseService {
/** /**
* @constructor * @constructor
* *

View File

@ -1,159 +0,0 @@
'use strict'; // eslint-disable-line strict
const { URL } = require('url');
const { decryptSecret } = require('../executables/pensieveCreds/utils');
function patchLocations(overlayLocations, creds, log) {
if (!overlayLocations) {
return {};
}
const locations = {};
Object.keys(overlayLocations).forEach(k => {
const l = overlayLocations[k];
const location = {
name: k,
objectId: l.objectId,
details: l.details || {},
locationType: l.locationType,
};
let supportsVersioning = false;
let pathStyle = process.env.CI_CEPH !== undefined;
switch (l.locationType) {
case 'location-mem-v1':
location.type = 'mem';
location.details = { supportsVersioning: true };
break;
case 'location-file-v1':
location.type = 'file';
location.details = { supportsVersioning: true };
break;
case 'location-azure-v1':
location.type = 'azure';
if (l.details.secretKey && l.details.secretKey.length > 0) {
location.details = {
bucketMatch: l.details.bucketMatch,
azureStorageEndpoint: l.details.endpoint,
azureStorageAccountName: l.details.accessKey,
azureStorageAccessKey: decryptSecret(creds,
l.details.secretKey),
azureContainerName: l.details.bucketName,
};
}
break;
case 'location-ceph-radosgw-s3-v1':
case 'location-scality-ring-s3-v1':
pathStyle = true; // fallthrough
case 'location-aws-s3-v1':
case 'location-wasabi-v1':
supportsVersioning = true; // fallthrough
case 'location-do-spaces-v1':
location.type = 'aws_s3';
if (l.details.secretKey && l.details.secretKey.length > 0) {
let https = true;
let awsEndpoint = l.details.endpoint ||
's3.amazonaws.com';
if (awsEndpoint.includes('://')) {
const url = new URL(awsEndpoint);
awsEndpoint = url.host;
https = url.protocol.includes('https');
}
location.details = {
credentials: {
accessKey: l.details.accessKey,
secretKey: decryptSecret(creds,
l.details.secretKey),
},
bucketName: l.details.bucketName,
bucketMatch: l.details.bucketMatch,
serverSideEncryption:
Boolean(l.details.serverSideEncryption),
region: l.details.region,
awsEndpoint,
supportsVersioning,
pathStyle,
https,
};
}
break;
case 'location-gcp-v1':
location.type = 'gcp';
if (l.details.secretKey && l.details.secretKey.length > 0) {
location.details = {
credentials: {
accessKey: l.details.accessKey,
secretKey: decryptSecret(creds,
l.details.secretKey),
},
bucketName: l.details.bucketName,
mpuBucketName: l.details.mpuBucketName,
bucketMatch: l.details.bucketMatch,
gcpEndpoint: l.details.endpoint ||
'storage.googleapis.com',
https: true,
};
}
break;
case 'location-scality-sproxyd-v1':
location.type = 'scality';
if (l.details && l.details.bootstrapList &&
l.details.proxyPath) {
location.details = {
supportsVersioning: true,
connector: {
sproxyd: {
chordCos: l.details.chordCos || null,
bootstrap: l.details.bootstrapList,
path: l.details.proxyPath,
},
},
};
}
break;
case 'location-nfs-mount-v1':
location.type = 'pfs';
if (l.details) {
location.details = {
supportsVersioning: true,
bucketMatch: true,
pfsDaemonEndpoint: {
host: `${l.name}-cosmos-pfsd`,
port: 80,
},
};
}
break;
case 'location-scality-hdclient-v2':
location.type = 'scality';
if (l.details && l.details.bootstrapList) {
location.details = {
supportsVersioning: true,
connector: {
hdclient: {
bootstrap: l.details.bootstrapList,
},
},
};
}
break;
default:
log.info(
'unknown location type',
{ locationType: l.locationType },
);
return;
}
location.sizeLimitGB = l.sizeLimitGB || null;
location.isTransient = Boolean(l.isTransient);
location.legacyAwsBehavior = Boolean(l.legacyAwsBehavior);
locations[location.name] = location;
return;
});
return locations;
}
module.exports = {
patchLocations,
};
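For reference, a usage sketch of the removed helper. The overlay shape is abbreviated and the objectId is illustrative; `creds` is only needed by decryptSecret, which the file backend never calls, so null suffices here:

```javascript
const werelogs = require('werelogs');
const log = new werelogs.Logger('patchLocationsExample');

const overlayLocations = {
    'us-east-1': {
        objectId: 'loc-0001',            // illustrative
        locationType: 'location-file-v1',
    },
};
const locations = patchLocations(overlayLocations, null, log);
// locations['us-east-1'] ->
//   { name: 'us-east-1', objectId: 'loc-0001', type: 'file',
//     details: { supportsVersioning: true }, sizeLimitGB: null,
//     isTransient: false, legacyAwsBehavior: false, ... }
```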

View File

@ -38,10 +38,6 @@
"type": "string", "type": "string",
"pattern": "^arn:aws:iam::[0-9]{12}:saml-provider/[\\w._-]{1,128}$" "pattern": "^arn:aws:iam::[0-9]{12}:saml-provider/[\\w._-]{1,128}$"
}, },
"principalFederatedOidcIdp": {
"type": "string",
"pattern": "^(?:http(s)?:\/\/)?[\\w.-]+(?:\\.[\\w\\.-]+)+[\\w\\-\\._~:/?#[\\]@!\\$&'\\(\\)\\*\\+,;=.]+$"
},
"principalAWSItem": { "principalAWSItem": {
"type": "object", "type": "object",
"properties": { "properties": {
@ -102,9 +98,6 @@
"oneOf": [ "oneOf": [
{ {
"$ref": "#/definitions/principalFederatedSamlIdp" "$ref": "#/definitions/principalFederatedSamlIdp"
},
{
"$ref": "#/definitions/principalFederatedOidcIdp"
} }
] ]
} }

View File

@ -30,7 +30,6 @@ const sharedActionMap = {
bypassGovernanceRetention: 's3:BypassGovernanceRetention', bypassGovernanceRetention: 's3:BypassGovernanceRetention',
listMultipartUploads: 's3:ListBucketMultipartUploads', listMultipartUploads: 's3:ListBucketMultipartUploads',
listParts: 's3:ListMultipartUploadParts', listParts: 's3:ListMultipartUploadParts',
metadataSearch: 's3:MetadataSearch',
multipartDelete: 's3:AbortMultipartUpload', multipartDelete: 's3:AbortMultipartUpload',
objectDelete: 's3:DeleteObject', objectDelete: 's3:DeleteObject',
objectDeleteTagging: 's3:DeleteObjectTagging', objectDeleteTagging: 's3:DeleteObjectTagging',
@ -117,7 +116,6 @@ const actionMonitoringMapS3 = {
initiateMultipartUpload: 'CreateMultipartUpload', initiateMultipartUpload: 'CreateMultipartUpload',
listMultipartUploads: 'ListMultipartUploads', listMultipartUploads: 'ListMultipartUploads',
listParts: 'ListParts', listParts: 'ListParts',
metadataSearch: 'MetadataSearch',
multiObjectDelete: 'DeleteObjects', multiObjectDelete: 'DeleteObjects',
multipartDelete: 'AbortMultipartUpload', multipartDelete: 'AbortMultipartUpload',
objectCopy: 'CopyObject', objectCopy: 'CopyObject',
@ -161,7 +159,6 @@ const actionMapIAM = {
getPolicyVersion: 'iam:GetPolicyVersion', getPolicyVersion: 'iam:GetPolicyVersion',
getUser: 'iam:GetUser', getUser: 'iam:GetUser',
listAccessKeys: 'iam:ListAccessKeys', listAccessKeys: 'iam:ListAccessKeys',
listEntitiesForPolicy: 'iam:ListEntitiesForPolicy',
listGroupPolicies: 'iam:ListGroupPolicies', listGroupPolicies: 'iam:ListGroupPolicies',
listGroups: 'iam:ListGroups', listGroups: 'iam:ListGroups',
listGroupsForUser: 'iam:ListGroupsForUser', listGroupsForUser: 'iam:ListGroupsForUser',

View File

@ -146,8 +146,6 @@ conditions.findConditionKey = (key, requestContext) => {
map.set('s3:ObjLocationConstraint', map.set('s3:ObjLocationConstraint',
headers['x-amz-meta-scal-location-constraint']); headers['x-amz-meta-scal-location-constraint']);
map.set('sts:ExternalId', requestContext.getRequesterExternalId()); map.set('sts:ExternalId', requestContext.getRequesterExternalId());
map.set('keycloak:groups', requesterInfo.keycloakGroup);
map.set('keycloak:roles', requesterInfo.keycloakRole);
map.set('iam:PolicyArn', requestContext.getPolicyArn()); map.set('iam:PolicyArn', requestContext.getPolicyArn());
// s3:ExistingObjectTag - Used to check that existing object tag has // s3:ExistingObjectTag - Used to check that existing object tag has
// specific tag key and value. Extraction of correct tag key is done in CloudServer. // specific tag key and value. Extraction of correct tag key is done in CloudServer.

View File

@ -6,6 +6,7 @@ const crypto = require('crypto');
* data through a stream * data through a stream
*/ */
class MD5Sum extends Transform { class MD5Sum extends Transform {
/** /**
* @constructor * @constructor
*/ */
@ -39,6 +40,7 @@ class MD5Sum extends Transform {
this.emit('hashed'); this.emit('hashed');
callback(null); callback(null);
} }
} }
module.exports = MD5Sum; module.exports = MD5Sum;
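A usage sketch of MD5Sum as a pass-through hasher. The require path and the `completedHash` property name are assumptions; only the 'hashed' event appears in the excerpt:

```javascript
const { createReadStream } = require('fs');
const MD5Sum = require('./MD5Sum'); // path assumed

const hasher = new MD5Sum();
hasher.on('hashed', () => {
    // completedHash is assumed to hold the hex digest once 'hashed' fires
    console.log('md5:', hasher.completedHash);
});
createReadStream('/etc/hosts').pipe(hasher).resume(); // resume() drains the transform
```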

View File

@ -121,7 +121,7 @@ azureMpuUtils.putSinglePart = (errorWrapperFn, request, params, dataStoreName,
return cb(errors.BadDigest); return cb(errors.BadDigest);
} }
return cb(errors.InternalError.customizeDescription( return cb(errors.InternalError.customizeDescription(
`Error returned from Azure: ${err.message}`), `Error returned from Azure: ${err.message}`)
); );
} }
const md5 = result.headers['content-md5'] || ''; const md5 = result.headers['content-md5'] || '';

View File

@ -33,7 +33,7 @@ convertMethods.listMultipartUploads = xmlParams => {
xml.push('<?xml version="1.0" encoding="UTF-8"?>', xml.push('<?xml version="1.0" encoding="UTF-8"?>',
'<ListMultipartUploadsResult ' + '<ListMultipartUploadsResult ' +
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">', 'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">',
`<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>`, `<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>`
); );
// For certain XML elements, if it is `undefined`, AWS returns either an // For certain XML elements, if it is `undefined`, AWS returns either an
@ -58,7 +58,7 @@ convertMethods.listMultipartUploads = xmlParams => {
}); });
xml.push(`<MaxUploads>${escapeForXml(l.MaxKeys)}</MaxUploads>`, xml.push(`<MaxUploads>${escapeForXml(l.MaxKeys)}</MaxUploads>`,
`<IsTruncated>${escapeForXml(l.IsTruncated)}</IsTruncated>`, `<IsTruncated>${escapeForXml(l.IsTruncated)}</IsTruncated>`
); );
l.Uploads.forEach(upload => { l.Uploads.forEach(upload => {
@ -84,14 +84,14 @@ convertMethods.listMultipartUploads = xmlParams => {
`<StorageClass>${escapeForXml(val.StorageClass)}` + `<StorageClass>${escapeForXml(val.StorageClass)}` +
'</StorageClass>', '</StorageClass>',
`<Initiated>${escapeForXml(val.Initiated)}</Initiated>`, `<Initiated>${escapeForXml(val.Initiated)}</Initiated>`,
'</Upload>', '</Upload>'
); );
}); });
l.CommonPrefixes.forEach(prefix => { l.CommonPrefixes.forEach(prefix => {
xml.push('<CommonPrefixes>', xml.push('<CommonPrefixes>',
`<Prefix>${escapeForXml(prefix)}</Prefix>`, `<Prefix>${escapeForXml(prefix)}</Prefix>`,
'</CommonPrefixes>', '</CommonPrefixes>'
); );
}); });

View File

@ -5,6 +5,7 @@ const Readable = require('stream').Readable;
* This class is used to produce zero-filled buffers for a reader's consumption * This class is used to produce zero-filled buffers for a reader's consumption
*/ */
class NullStream extends Readable { class NullStream extends Readable {
/** /**
* Construct a new zero-filled buffers producer that will * Construct a new zero-filled buffers producer that will
* produce as many bytes as specified by the range parameter, or the size * produce as many bytes as specified by the range parameter, or the size

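A sketch of using such a stream as stand-in object data; the constructor arguments (size, optional range) are assumed from the doc comment above:

```javascript
const NullStream = require('./nullStream'); // path assumed

const body = new NullStream(16); // 16 zero bytes; constructor shape assumed
body.on('data', chunk => {
    console.log(chunk.length, chunk.every(byte => byte === 0)); // 16 true
});
```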
View File

@ -68,31 +68,6 @@ function _checkUnmodifiedSince(ifUnmodifiedSinceTime, lastModified) {
return res; return res;
} }
/**
* checks 'if-modified-since' and 'if-unmodified-since' headers if included in
* request against last-modified date of object
* @param {object} headers - headers from request object
* @param {string} lastModified - last modified date of object
* @return {object} contains modifiedSince and unmodifiedSince res objects
*/
function checkDateModifiedHeaders(headers, lastModified) {
let lastModifiedDate = new Date(lastModified);
lastModifiedDate.setMilliseconds(0);
lastModifiedDate = lastModifiedDate.getTime();
const ifModifiedSinceHeader = headers['if-modified-since'] ||
headers['x-amz-copy-source-if-modified-since'];
const ifUnmodifiedSinceHeader = headers['if-unmodified-since'] ||
headers['x-amz-copy-source-if-unmodified-since'];
const modifiedSinceRes = _checkModifiedSince(ifModifiedSinceHeader,
lastModifiedDate);
const unmodifiedSinceRes = _checkUnmodifiedSince(ifUnmodifiedSinceHeader,
lastModifiedDate);
return { modifiedSinceRes, unmodifiedSinceRes };
}
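A sketch of calling the extracted helper inside this module; note that lastModified is truncated to whole seconds (setMilliseconds(0)) before the comparison:

```javascript
const { modifiedSinceRes, unmodifiedSinceRes } = checkDateModifiedHeaders(
    { 'if-modified-since': 'Mon, 01 Jan 2024 00:00:00 GMT' },
    new Date('2024-01-01T00:00:00.500Z').toISOString());
// After truncation the timestamps are equal, so the object counts as
// "not modified since" the header date; the two res objects carry the
// outcomes that validateConditionalHeaders combines below.
```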
/** /**
* validateConditionalHeaders - validates 'if-modified-since', * validateConditionalHeaders - validates 'if-modified-since',
* 'if-unmodified-since', 'if-match' or 'if-none-match' headers if included in * 'if-unmodified-since', 'if-match' or 'if-none-match' headers if included in
@ -104,14 +79,23 @@ function checkDateModifiedHeaders(headers, lastModified) {
* empty object if no error * empty object if no error
*/ */
function validateConditionalHeaders(headers, lastModified, contentMD5) { function validateConditionalHeaders(headers, lastModified, contentMD5) {
let lastModifiedDate = new Date(lastModified);
lastModifiedDate.setMilliseconds(0);
lastModifiedDate = lastModifiedDate.getTime();
const ifMatchHeader = headers['if-match'] || const ifMatchHeader = headers['if-match'] ||
headers['x-amz-copy-source-if-match']; headers['x-amz-copy-source-if-match'];
const ifNoneMatchHeader = headers['if-none-match'] || const ifNoneMatchHeader = headers['if-none-match'] ||
headers['x-amz-copy-source-if-none-match']; headers['x-amz-copy-source-if-none-match'];
const ifModifiedSinceHeader = headers['if-modified-since'] ||
headers['x-amz-copy-source-if-modified-since'];
const ifUnmodifiedSinceHeader = headers['if-unmodified-since'] ||
headers['x-amz-copy-source-if-unmodified-since'];
const etagMatchRes = _checkEtagMatch(ifMatchHeader, contentMD5); const etagMatchRes = _checkEtagMatch(ifMatchHeader, contentMD5);
const etagNoneMatchRes = _checkEtagNoneMatch(ifNoneMatchHeader, contentMD5); const etagNoneMatchRes = _checkEtagNoneMatch(ifNoneMatchHeader, contentMD5);
const { modifiedSinceRes, unmodifiedSinceRes } = const modifiedSinceRes = _checkModifiedSince(ifModifiedSinceHeader,
checkDateModifiedHeaders(headers, lastModified); lastModifiedDate);
const unmodifiedSinceRes = _checkUnmodifiedSince(ifUnmodifiedSinceHeader,
lastModifiedDate);
// If-Unmodified-Since condition evaluates to false and If-Match // If-Unmodified-Since condition evaluates to false and If-Match
// is not present, then return the error. Otherwise, If-Unmodified-Since is // is not present, then return the error. Otherwise, If-Unmodified-Since is
// silent when If-Match match, and when If-Match does not match, it's the // silent when If-Match match, and when If-Match does not match, it's the
@ -136,6 +120,5 @@ module.exports = {
_checkEtagNoneMatch, _checkEtagNoneMatch,
_checkModifiedSince, _checkModifiedSince,
_checkUnmodifiedSince, _checkUnmodifiedSince,
checkDateModifiedHeaders,
validateConditionalHeaders, validateConditionalHeaders,
}; };
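The precedence rule described in the comment above can be seen directly; a sketch evaluated inside this module (ETag quoting elided for brevity):

```javascript
const err = validateConditionalHeaders(
    {
        'if-match': '9bb58f26192e4ba00f01e2e7b136bbd8',
        'if-unmodified-since': 'Mon, 01 Jan 1990 00:00:00 GMT', // fails: object is newer
    },
    'Mon, 01 Jan 2024 00:00:00 GMT',     // lastModified
    '9bb58f26192e4ba00f01e2e7b136bbd8'); // contentMD5 matching If-Match
// err is {} (no error): the matching If-Match silences the failing
// If-Unmodified-Since check, as described above.
```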

View File

@ -10,7 +10,6 @@ const routeOPTIONS = require('./routes/routeOPTIONS');
const routesUtils = require('./routesUtils'); const routesUtils = require('./routesUtils');
const routeWebsite = require('./routes/routeWebsite'); const routeWebsite = require('./routes/routeWebsite');
const { objectKeyByteLimit } = require('../constants');
const requestUtils = require('../../lib/policyEvaluator/requestUtils'); const requestUtils = require('../../lib/policyEvaluator/requestUtils');
const routeMap = { const routeMap = {
@ -58,14 +57,8 @@ function checkBucketAndKey(bucketName, objectKey, method, reqQuery,
blacklistedPrefixes.object); blacklistedPrefixes.object);
if (!result.isValid) { if (!result.isValid) {
log.debug('invalid object key', { objectKey }); log.debug('invalid object key', { objectKey });
if (result.invalidPrefix) { return errors.InvalidArgument.customizeDescription('Object key ' +
return errors.InvalidArgument.customizeDescription('Invalid ' + `must not start with "${result.invalidPrefix}".`);
'prefix - object key cannot start with ' +
`"${result.invalidPrefix}".`);
}
return errors.KeyTooLong.customizeDescription('Object key is too ' +
'long. Maximum number of bytes allowed in keys is ' +
`${objectKeyByteLimit}.`);
} }
} }
if ((reqQuery.partNumber || reqQuery.uploadId) if ((reqQuery.partNumber || reqQuery.uploadId)
@ -122,8 +115,8 @@ function checkTypes(req, res, params, logger, s3config) {
assert.strictEqual(typeof pre, 'string', assert.strictEqual(typeof pre, 'string',
'bad routes param: each blacklisted object prefix must be a string'); 'bad routes param: each blacklisted object prefix must be a string');
}); });
assert.strictEqual(typeof params.dataRetrievalParams, 'object', assert.strictEqual(typeof params.dataRetrievalFn, 'function',
'bad routes param: dataRetrievalParams must be a defined object'); 'bad routes param: dataRetrievalFn must be a defined function');
if (s3config) { if (s3config) {
assert.strictEqual(typeof s3config, 'object', 'bad routes param: s3config must be an object'); assert.strictEqual(typeof s3config, 'object', 'bad routes param: s3config must be an object');
} }
@ -145,8 +138,7 @@ function checkTypes(req, res, params, logger, s3config) {
* @param {string[]} params.blacklistedPrefixes.object - object prefixes * @param {string[]} params.blacklistedPrefixes.object - object prefixes
* @param {object} params.unsupportedQueries - object containing true/false * @param {object} params.unsupportedQueries - object containing true/false
* values for whether queries are supported * values for whether queries are supported
* @param {object} params.dataRetrievalParams - params to create instance of * @param {function} params.dataRetrievalFn - function to retrieve data
* data retrieval function
* @param {RequestLogger} logger - werelogs logger instance * @param {RequestLogger} logger - werelogs logger instance
* @param {object} [s3config] - s3 configuration * @param {object} [s3config] - s3 configuration
* @returns {undefined} * @returns {undefined}
@ -161,7 +153,7 @@ function routes(req, res, params, logger, s3config) {
allEndpoints, allEndpoints,
websiteEndpoints, websiteEndpoints,
blacklistedPrefixes, blacklistedPrefixes,
dataRetrievalParams, dataRetrievalFn,
} = params; } = params;
const clientInfo = { const clientInfo = {
@ -182,8 +174,7 @@ function routes(req, res, params, logger, s3config) {
logger.newRequestLoggerFromSerializedUids(reqUids) : logger.newRequestLoggerFromSerializedUids(reqUids) :
logger.newRequestLogger()); logger.newRequestLogger());
if (!req.url.startsWith('/_/healthcheck') && if (!req.url.startsWith('/_/healthcheck')) {
!req.url.startsWith('/_/report')) {
log.info('received request', clientInfo); log.info('received request', clientInfo);
} }
@ -244,11 +235,10 @@ function routes(req, res, params, logger, s3config) {
// bucket website request // bucket website request
if (websiteEndpoints && websiteEndpoints.indexOf(req.parsedHost) > -1) { if (websiteEndpoints && websiteEndpoints.indexOf(req.parsedHost) > -1) {
return routeWebsite(req, res, api, log, statsClient, return routeWebsite(req, res, api, log, statsClient, dataRetrievalFn);
dataRetrievalParams);
} }
return method(req, res, api, log, statsClient, dataRetrievalParams); return method(req, res, api, log, statsClient, dataRetrievalFn);
} }
module.exports = routes; module.exports = routes;
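The right-hand `dataRetrievalFn` contract is visible in routesUtils further down (`retrieveDataFn(current, response, log, (err, readable) => ...)`); a stand-in that satisfies it:

```javascript
const { Readable } = require('stream');

// objectGetInfo describes one data location; here we just synthesize
// a buffer of the advertised size instead of contacting a backend.
function dataRetrievalFn(objectGetInfo, response, log, callback) {
    const readable = Readable.from([Buffer.alloc(objectGetInfo.size || 0)]);
    return callback(null, readable);
}
```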

View File

@ -1,8 +1,7 @@
const errors = require('../../errors'); const errors = require('../../errors');
const routesUtils = require('../routesUtils'); const routesUtils = require('../routesUtils');
function routerGET(request, response, api, log, statsClient, function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
dataRetrievalParams) {
log.debug('routing request', { method: 'routerGET' }); log.debug('routing request', { method: 'routerGET' });
if (request.bucketName === undefined && request.objectKey !== undefined) { if (request.bucketName === undefined && request.objectKey !== undefined) {
routesUtils.responseXMLBody(errors.NoSuchBucket, null, response, log); routesUtils.responseXMLBody(errors.NoSuchBucket, null, response, log);
@ -100,13 +99,6 @@ function routerGET(request, response, api, log, statsClient,
return routesUtils.responseXMLBody(err, xml, response, return routesUtils.responseXMLBody(err, xml, response,
log, corsHeaders); log, corsHeaders);
}); });
} else if (request.query.search !== undefined) {
api.callApiMethod('metadataSearch', request, response, log,
(err, xml, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseXMLBody(err, xml, response,
log, corsHeaders);
});
} else { } else {
// GET bucket // GET bucket
api.callApiMethod('bucketGet', request, response, log, api.callApiMethod('bucketGet', request, response, log,
@ -165,8 +157,8 @@ function routerGET(request, response, api, log, statsClient,
log.end().addDefaultFields({ contentLength }); log.end().addDefaultFields({ contentLength });
routesUtils.statsReport500(err, statsClient); routesUtils.statsReport500(err, statsClient);
return routesUtils.responseStreamData(err, request.query, return routesUtils.responseStreamData(err, request.query,
resMetaHeaders, dataGetInfo, dataRetrievalParams, resMetaHeaders, dataGetInfo, dataRetrievalFn, response,
response, range, log); range, log);
}); });
} }
} }

View File

@ -2,7 +2,7 @@ const errors = require('../../errors');
const routesUtils = require('../routesUtils'); const routesUtils = require('../routesUtils');
function routerWebsite(request, response, api, log, statsClient, function routerWebsite(request, response, api, log, statsClient,
dataRetrievalParams) { dataRetrievalFn) {
log.debug('routing request', { method: 'routerWebsite' }); log.debug('routing request', { method: 'routerWebsite' });
// website endpoint only supports GET and HEAD and must have a bucket // website endpoint only supports GET and HEAD and must have a bucket
// http://docs.aws.amazon.com/AmazonS3/latest/dev/WebsiteEndpoints.html // http://docs.aws.amazon.com/AmazonS3/latest/dev/WebsiteEndpoints.html
@ -27,7 +27,7 @@ function routerWebsite(request, response, api, log, statsClient,
// user has their own error page // user has their own error page
if (err && dataGetInfo) { if (err && dataGetInfo) {
return routesUtils.streamUserErrorPage(err, dataGetInfo, return routesUtils.streamUserErrorPage(err, dataGetInfo,
dataRetrievalParams, response, resMetaHeaders, log); dataRetrievalFn, response, resMetaHeaders, log);
} }
// send default error html response // send default error html response
if (err) { if (err) {
@ -37,7 +37,7 @@ function routerWebsite(request, response, api, log, statsClient,
} }
// no error, stream data // no error, stream data
return routesUtils.responseStreamData(null, request.query, return routesUtils.responseStreamData(null, request.query,
resMetaHeaders, dataGetInfo, dataRetrievalParams, response, resMetaHeaders, dataGetInfo, dataRetrievalFn, response,
null, log); null, log);
}); });
} }

View File

@ -4,9 +4,6 @@ const errors = require('../errors');
const constants = require('../constants'); const constants = require('../constants');
const { eachSeries } = require('async'); const { eachSeries } = require('async');
const DataWrapper = require('../storage/data/DataWrapper');
const { objectKeyByteLimit } = require('../constants');
const responseErr = new Error(); const responseErr = new Error();
responseErr.code = 'ResponseError'; responseErr.code = 'ResponseError';
responseErr.message = 'response closed by client request before all data sent'; responseErr.message = 'response closed by client request before all data sent';
@ -118,7 +115,7 @@ const XMLResponseBackend = {
`<Message>${errCode.description}</Message>`, `<Message>${errCode.description}</Message>`,
'<Resource></Resource>', '<Resource></Resource>',
`<RequestId>${log.getSerializedUids()}</RequestId>`, `<RequestId>${log.getSerializedUids()}</RequestId>`,
'</Error>', '</Error>'
); );
const xmlStr = xml.join(''); const xmlStr = xml.join('');
const bytesSent = Buffer.byteLength(xmlStr); const bytesSent = Buffer.byteLength(xmlStr);
@ -260,7 +257,7 @@ function okContentHeadersResponse(overrideParams, resHeaders,
return response; return response;
} }
function retrieveDataAzure(locations, retrieveDataParams, response, logger) { function retrieveDataAzure(locations, retrieveDataFn, response, logger) {
const errorHandlerFn = () => { response.connection.destroy(); }; const errorHandlerFn = () => { response.connection.destroy(); };
const current = locations.shift(); const current = locations.shift();
@ -268,18 +265,7 @@ function retrieveDataAzure(locations, retrieveDataParams, response, logger) {
logger.error('error piping data from source'); logger.error('error piping data from source');
errorHandlerFn(err); errorHandlerFn(err);
}); });
const { return retrieveDataFn(current, response, logger, err => {
client,
implName,
config,
kms,
metadata,
locStorageCheckFn,
vault,
} = retrieveDataParams;
const data = new DataWrapper(
client, implName, config, kms, metadata, locStorageCheckFn, vault);
return data.get(current, response, logger, err => {
if (err) { if (err) {
logger.error('failed to get object from source', { logger.error('failed to get object from source', {
error: err, error: err,
@ -292,12 +278,12 @@ function retrieveDataAzure(locations, retrieveDataParams, response, logger) {
}); });
} }
function retrieveData(locations, retrieveDataParams, response, log) { function retrieveData(locations, retrieveDataFn, response, log) {
if (locations.length === 0) { if (locations.length === 0) {
return response.end(); return response.end();
} }
if (locations[0].azureStreamingOptions) { if (locations[0].azureStreamingOptions) {
return retrieveDataAzure(locations, retrieveDataParams, response, log); return retrieveDataAzure(locations, retrieveDataFn, response, log);
} }
// response is of type http.ServerResponse // response is of type http.ServerResponse
let responseDestroyed = false; let responseDestroyed = false;
@ -307,33 +293,16 @@ function retrieveData(locations, retrieveDataParams, response, log) {
response.destroy(); response.destroy();
responseDestroyed = true; responseDestroyed = true;
}; };
const _destroyReadable = readable => {
// s3-data sends Readable stream only which does not implement destroy
if (readable && readable.destroy) {
readable.destroy();
}
};
// the S3-client might close the connection while we are processing it // the S3-client might close the connection while we are processing it
response.once('close', () => { response.once('close', () => {
responseDestroyed = true; responseDestroyed = true;
_destroyReadable(currentStream); if (currentStream) {
currentStream.destroy();
}
}); });
const {
client,
implName,
config,
kms,
metadata,
locStorageCheckFn,
vault,
} = retrieveDataParams;
const data = new DataWrapper(
client, implName, config, kms, metadata, locStorageCheckFn, vault);
return eachSeries(locations, return eachSeries(locations,
(current, next) => data.get(current, response, log, (current, next) => retrieveDataFn(current, response, log,
(err, readable) => { (err, readable) => {
// NB: readable is of IncomingMessage type // NB: readable is of IncomingMessage type
if (err) { if (err) {
@ -350,7 +319,7 @@ function retrieveData(locations, retrieveDataParams, response, log) {
if (responseDestroyed || response.isclosed) { if (responseDestroyed || response.isclosed) {
log.debug( log.debug(
'response destroyed before readable could stream'); 'response destroyed before readable could stream');
_destroyReadable(readable); readable.destroy();
return next(responseErr); return next(responseErr);
} }
// readable stream successfully consumed // readable stream successfully consumed
@ -376,7 +345,7 @@ function retrieveData(locations, retrieveDataParams, response, log) {
// call end for all cases (error/success) per node.js docs // call end for all cases (error/success) per node.js docs
// recommendation // recommendation
response.end(); response.end();
}, }
); );
} }
@ -492,8 +461,7 @@ const routesUtils = {
* @param {array | null} dataLocations -- * @param {array | null} dataLocations --
* - array of locations to get streams from sproxyd * - array of locations to get streams from sproxyd
* - null if no data for object and only metadata * - null if no data for object and only metadata
* @param {object} retrieveDataParams - params to create instance of data * @param {function} retrieveDataFn - function to handle streaming data
* retrieval function
* @param {http.ServerResponse} response - response sent to the client * @param {http.ServerResponse} response - response sent to the client
* @param {array | undefined} range - range in format of [start, end] * @param {array | undefined} range - range in format of [start, end]
* if range header contained in request or undefined if not * if range header contained in request or undefined if not
@ -501,7 +469,7 @@ const routesUtils = {
* @return {undefined} * @return {undefined}
*/ */
responseStreamData(errCode, overrideParams, resHeaders, dataLocations, responseStreamData(errCode, overrideParams, resHeaders, dataLocations,
retrieveDataParams, response, range, log) { retrieveDataFn, response, range, log) {
if (errCode && !response.headersSent) { if (errCode && !response.headersSent) {
return XMLResponseBackend.errorResponse(errCode, response, log, return XMLResponseBackend.errorResponse(errCode, response, log,
resHeaders); resHeaders);
@ -537,21 +505,20 @@ const routesUtils = {
httpCode: response.statusCode, httpCode: response.statusCode,
}); });
}); });
return retrieveData(dataLocations, retrieveDataParams, response, log); return retrieveData(dataLocations, retrieveDataFn, response, log);
}, },
/** /**
* @param {object} err -- arsenal error object * @param {object} err -- arsenal error object
* @param {array} dataLocations -- * @param {array} dataLocations --
* - array of locations to get streams from backend * - array of locations to get streams from backend
* @param {object} retrieveDataParams - params to create instance of * @param {function} retrieveDataFn - function to handle streaming data
* data retrieval function
* @param {http.ServerResponse} response - response sent to the client * @param {http.ServerResponse} response - response sent to the client
* @param {object} corsHeaders - CORS-related response headers * @param {object} corsHeaders - CORS-related response headers
* @param {object} log - Werelogs logger * @param {object} log - Werelogs logger
* @return {undefined} * @return {undefined}
*/ */
streamUserErrorPage(err, dataLocations, retrieveDataParams, response, streamUserErrorPage(err, dataLocations, retrieveDataFn, response,
corsHeaders, log) { corsHeaders, log) {
setCommonResponseHeaders(corsHeaders, response, log); setCommonResponseHeaders(corsHeaders, response, log);
response.writeHead(err.code, { 'Content-type': 'text/html' }); response.writeHead(err.code, { 'Content-type': 'text/html' });
@ -560,7 +527,7 @@ const routesUtils = {
httpCode: response.statusCode, httpCode: response.statusCode,
}); });
}); });
return retrieveData(dataLocations, retrieveDataParams, response, log); return retrieveData(dataLocations, retrieveDataFn, response, log);
}, },
/** /**
@ -591,7 +558,7 @@ const routesUtils = {
`<h1>${err.code} ${response.statusMessage}</h1>`, `<h1>${err.code} ${response.statusMessage}</h1>`,
'<ul>', '<ul>',
`<li>Code: ${err.message}</li>`, `<li>Code: ${err.message}</li>`,
`<li>Message: ${err.description}</li>`, `<li>Message: ${err.description}</li>`
); );
if (!userErrorPageFailure && bucketName) { if (!userErrorPageFailure && bucketName) {
@ -601,7 +568,7 @@ const routesUtils = {
`<li>RequestId: ${log.getSerializedUids()}</li>`, `<li>RequestId: ${log.getSerializedUids()}</li>`,
// AWS response contains HostId here. // AWS response contains HostId here.
// TODO: consider adding // TODO: consider adding
'</ul>', '</ul>'
); );
if (userErrorPageFailure) { if (userErrorPageFailure) {
html.push( html.push(
@ -611,13 +578,13 @@ const routesUtils = {
'<ul>', '<ul>',
`<li>Code: ${err.message}</li>`, `<li>Code: ${err.message}</li>`,
`<li>Message: ${err.description}</li>`, `<li>Message: ${err.description}</li>`,
'</ul>', '</ul>'
); );
} }
html.push( html.push(
'<hr/>', '<hr/>',
'</body>', '</body>',
'</html>', '</html>'
); );
return response.end(html.join(''), 'utf8', () => { return response.end(html.join(''), 'utf8', () => {
@ -847,7 +814,7 @@ const routesUtils = {
return bucketName; return bucketName;
} }
throw new Error( throw new Error(
`bad request: hostname ${host} is not in valid endpoints`, `bad request: hostname ${host} is not in valid endpoints`
); );
}, },
@ -914,9 +881,6 @@ const routesUtils = {
if (invalidPrefix) { if (invalidPrefix) {
return { isValid: false, invalidPrefix }; return { isValid: false, invalidPrefix };
} }
if (Buffer.byteLength(objectKey, 'utf8') > objectKeyByteLimit) {
return { isValid: false };
}
return { isValid: true }; return { isValid: true };
}, },
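The 8.1-side guard removed in this hunk limits keys by UTF-8 byte length rather than character count; a runnable sketch (the 915-byte limit is an assumed value, the excerpt only imports the constant):

```javascript
const objectKeyByteLimit = 915; // assumed value of constants.objectKeyByteLimit
const objectKey = 'é'.repeat(500); // 500 characters, 1000 UTF-8 bytes
console.log(objectKey.length);                     // 500
console.log(Buffer.byteLength(objectKey, 'utf8')); // 1000
console.log(Buffer.byteLength(objectKey, 'utf8') > objectKeyByteLimit); // true -> invalid key
```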

View File

@ -29,4 +29,5 @@ server.start(() => {
logger.info('Metadata Proxy Server successfully started. ' + logger.info('Metadata Proxy Server successfully started. ' +
`Using the ${metadataWrapper.implName} backend`); `Using the ${metadataWrapper.implName} backend`);
}); });
``` ```

View File

@ -160,7 +160,7 @@ class TestMatrix {
const result = Object.keys(matrixChild.params) const result = Object.keys(matrixChild.params)
.every(currentKey => .every(currentKey =>
Object.prototype.toString.call( Object.prototype.toString.call(
matrixChild.params[currentKey], matrixChild.params[currentKey]
).indexOf('Array') === -1); ).indexOf('Array') === -1);
if (result === true) { if (result === true) {

View File

@ -3,14 +3,14 @@
"engines": { "engines": {
"node": ">=16" "node": ">=16"
}, },
"version": "8.1.39", "version": "7.10.15",
"description": "Common utilities for the S3 project components", "description": "Common utilities for the S3 project components",
"main": "build/index.js", "main": "build/index.js",
"repository": { "repository": {
"type": "git", "type": "git",
"url": "git+https://github.com/scality/Arsenal.git" "url": "git+https://github.com/scality/Arsenal.git"
}, },
"author": "Scality Inc.", "author": "Giorgio Regni",
"license": "Apache-2.0", "license": "Apache-2.0",
"bugs": { "bugs": {
"url": "https://github.com/scality/Arsenal/issues" "url": "https://github.com/scality/Arsenal/issues"
@ -21,18 +21,17 @@
"JSONStream": "^1.0.0", "JSONStream": "^1.0.0",
"agentkeepalive": "^4.1.3", "agentkeepalive": "^4.1.3",
"ajv": "6.12.2", "ajv": "6.12.2",
"async": "~2.6.1", "async": "~2.1.5",
"aws-sdk": "2.80.0", "aws-sdk": "2.80.0",
"azure-storage": "2.10.3", "azure-storage": "2.10.3",
"backo": "^1.1.0", "backo": "^1.1.0",
"base-x": "3.0.8", "base-x": "3.0.8",
"base62": "2.0.1", "base62": "2.0.1",
"bson": "4.0.0", "bson": "4.0.0",
"debug": "~4.1.0", "debug": "~2.6.9",
"diskusage": "^1.1.1", "diskusage": "^1.1.1",
"fcntl": "github:scality/node-fcntl#0.2.0", "fcntl": "github:scality/node-fcntl#0.2.0",
"hdclient": "scality/hdclient#1.1.0", "hdclient": "scality/hdclient#1.1.0",
"https-proxy-agent": "^2.2.0",
"ioredis": "^4.28.5", "ioredis": "^4.28.5",
"ipaddr.js": "1.9.1", "ipaddr.js": "1.9.1",
"level": "~5.0.1", "level": "~5.0.1",
@ -40,11 +39,11 @@
"mongodb": "^3.0.1", "mongodb": "^3.0.1",
"node-forge": "^0.7.1", "node-forge": "^0.7.1",
"prom-client": "10.2.3", "prom-client": "10.2.3",
"simple-glob": "^0.2.0", "simple-glob": "^0.2",
"socket.io": "2.4.1", "socket.io": "~2.3.0",
"socket.io-client": "2.4.0", "socket.io-client": "~2.3.0",
"sproxydclient": "github:scality/sproxydclient#8.0.3", "sproxydclient": "github:scality/sproxydclient#8.0.3",
"utf8": "3.0.0", "utf8": "2.1.2",
"uuid": "^3.0.1", "uuid": "^3.0.1",
"werelogs": "scality/werelogs#8.1.0", "werelogs": "scality/werelogs#8.1.0",
"xml2js": "~0.4.23" "xml2js": "~0.4.23"
@ -58,13 +57,13 @@
"@sinonjs/fake-timers": "^6.0.1", "@sinonjs/fake-timers": "^6.0.1",
"@types/jest": "^27.4.1", "@types/jest": "^27.4.1",
"@types/node": "^17.0.21", "@types/node": "^17.0.21",
"eslint": "^8.9.0", "eslint": "2.13.1",
"eslint-config-airbnb": "6.2.0", "eslint-config-airbnb": "6.2.0",
"eslint-config-scality": "scality/Guidelines#ec33dfb", "eslint-config-scality": "scality/Guidelines#7.10.2",
"eslint-plugin-react": "^4.3.0", "eslint-plugin-react": "^4.3.0",
"jest": "^27.5.1", "jest": "^27.5.1",
"mocha": "8.0.1",
"mongodb-memory-server": "^6.0.2", "mongodb-memory-server": "^6.0.2",
"nyc": "^15.1.0",
"sinon": "^9.0.2", "sinon": "^9.0.2",
"temp": "0.9.1", "temp": "0.9.1",
"ts-jest": "^27.1.3", "ts-jest": "^27.1.3",
@ -78,15 +77,11 @@
"test": "jest tests/unit", "test": "jest tests/unit",
"build": "tsc", "build": "tsc",
"prepare": "yarn build || true", "prepare": "yarn build || true",
"ft_test": "jest tests/functional --testTimeout=120000 --forceExit", "ft_test": "jest tests/functional --testTimeout=120000 --forceExit"
"coverage": "nyc --clean jest tests --coverage --testTimeout=120000 --forceExit"
}, },
"jest": {
"private": true, "private": true,
"jest": {
"maxWorkers": 1, "maxWorkers": 1,
"coverageReporters": [
"json"
],
"collectCoverageFrom": [ "collectCoverageFrom": [
"lib/**/*.{js,ts}", "lib/**/*.{js,ts}",
"index.js" "index.js"
@ -99,12 +94,5 @@
} }
} }
} }
},
"nyc": {
"tempDirectory": "coverage",
"reporter": [
"lcov",
"text"
]
} }
} }

View File

@ -41,7 +41,7 @@ describe('KMIP Low Level Driver', () => {
return done(err); return done(err);
} }
const responsePayload = response.lookup( const responsePayload = response.lookup(
'Response Message/Batch Item/Response Payload', 'Response Message/Batch Item/Response Payload'
)[0]; )[0];
assert.deepStrictEqual(responsePayload, assert.deepStrictEqual(responsePayload,
requestPayload); requestPayload);

View File

@ -1,463 +0,0 @@
const async = require('async');
const assert = require('assert');
const sinon = require('sinon');
const werelogs = require('werelogs');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
const { errors, versioning } = require('../../../../index');
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
const BucketInfo = require('../../../../lib/models/BucketInfo');
const MetadataWrapper =
require('../../../../lib/storage/metadata/MetadataWrapper');
const genVID = require('../../../../lib/versioning/VersionID').generateVersionId;
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
const IMPL_NAME = 'mongodb';
const DB_NAME = 'metadata';
const BUCKET_NAME = 'test-bucket';
const replicationGroupId = 'RG001';
const mongoserver = new MongoMemoryReplSet({
debug: false,
instanceOpts: [
{ port: 27018 },
],
replSet: {
name: 'rs0',
count: 1,
DB_NAME,
storageEngine: 'ephemeralForTest',
},
});
let uidCounter = 0;
function generateVersionId() {
return genVID(`${process.pid}.${uidCounter++}`,
replicationGroupId);
}
const variations = [
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0 },
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1 },
];
describe('MongoClientInterface::metadata.deleteObjectMD', () => {
let metadata;
let collection;
function getObjectCount(cb) {
collection.countDocuments((err, count) => {
if (err) {
return cb(err);
}
return cb(null, count);
});
}
function getObject(key, cb) {
collection.findOne({
_id: key,
}, {}, (err, doc) => {
if (err) {
return cb(err);
}
if (!doc) {
return cb(errors.NoSuchKey);
}
return cb(null, doc.value);
});
}
beforeAll(done => {
mongoserver.waitUntilRunning().then(() => {
const opts = {
mongodb: {
replicaSetHosts: 'localhost:27018',
writeConcern: 'majority',
replicaSet: 'rs0',
readPreference: 'primary',
database: DB_NAME,
},
};
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
metadata.setup(done);
});
});
afterAll(done => {
async.series([
next => metadata.close(next),
next => mongoserver.stop()
.then(() => next())
.catch(next),
], done);
});
variations.forEach(variation => {
const itOnlyInV1 = variation.vFormat === 'v1' ? it : it.skip;
describe(`vFormat : ${variation.vFormat}`, () => {
beforeEach(done => {
const bucketMD = BucketInfo.fromObj({
_name: BUCKET_NAME,
_owner: 'testowner',
_ownerDisplayName: 'testdisplayname',
_creationDate: new Date().toJSON(),
_acl: {
Canned: 'private',
FULL_CONTROL: [],
WRITE: [],
WRITE_ACP: [],
READ: [],
READ_ACP: [],
},
_mdBucketModelVersion: 10,
_transient: false,
_deleted: false,
_serverSideEncryption: null,
_versioningConfiguration: null,
_locationConstraint: 'us-east-1',
_readLocationConstraint: null,
_cors: null,
_replicationConfiguration: null,
_lifecycleConfiguration: null,
_uid: '',
_isNFS: null,
ingestion: null,
});
async.series([
next => {
metadata.client.defaultBucketKeyFormat = variation.vFormat;
return next();
},
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
if (err) {
return next(err);
}
collection = metadata.client.getCollection(BUCKET_NAME);
return next();
}),
], done);
});
afterEach(done => {
metadata.deleteBucket(BUCKET_NAME, logger, done);
});
it(`Should delete non versioned object ${variation.vFormat}`, done => {
const params = {
objName: 'non-deleted-object',
objVal: {
key: 'non-deleted-object',
versionId: 'null',
},
};
const versionParams = {
versioning: false,
versionId: null,
repairMaster: null,
};
return async.series([
next => {
// we put the master version of object
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, next);
},
next => {
// we put the master version of a second object
params.objName = 'object-to-deleted';
params.objVal.key = 'object-to-deleted';
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, next);
},
next => {
// we delete the second object ('object-to-deleted')
metadata.deleteObjectMD(BUCKET_NAME, params.objName, null, logger, next);
},
next => {
// Object must be removed
metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return next();
});
},
next => {
// only 1 object remaining in db
getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 1);
return next();
});
},
], done);
});
it(`Should not throw error when object non existent ${variation.vFormat}`, done => {
const objName = 'non-existent-object';
metadata.deleteObjectMD(BUCKET_NAME, objName, null, logger, err => {
assert.deepStrictEqual(err, null);
return done();
});
});
it(`Should not throw error when bucket non existent ${variation.vFormat}`, done => {
const objName = 'non-existent-object';
metadata.deleteObjectMD('non-existent-bucket', objName, null, logger, err => {
assert.deepStrictEqual(err, null);
return done();
});
});
it(`Master should not be updated when a non-latest version is deleted ${variation.vFormat}`, done => {
let versionId1 = null;
const params = {
objName: 'test-object',
objVal: {
key: 'test-object',
versionId: 'null',
},
vFormat: 'v0',
};
const versionParams = {
versioning: true,
versionId: null,
repairMaster: null,
};
return async.series([
next => {
// we start by creating a new version and master
versionId1 = generateVersionId(this.replicationGroupId);
params.versionId = versionId1;
params.objVal.versionId = versionId1;
versionParams.versionId = versionId1;
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, next);
},
next => {
// we create a second version of the same object (master is updated)
params.objVal.versionId = 'version2';
versionParams.versionId = null;
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, next);
},
next => {
// we delete the first version
metadata.deleteObjectMD(BUCKET_NAME, params.objName, { versionId: versionId1 },
logger, next);
},
next => {
// the first version should no longer be available
metadata.getObjectMD(BUCKET_NAME, params.objName, { versionId: versionId1 }, logger, err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return next();
});
},
next => {
// master must contain the second version's metadata
metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.notStrictEqual(data.versionId, versionId1);
return next();
});
},
next => {
// master and one version remaining in db
getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 2);
return next();
});
},
], done);
});
it(`Master should be updated when last version is deleted ${variation.vFormat}`, done => {
let versionId1;
let versionId2;
const params = {
objName: 'test-object',
objVal: {
key: 'test-object',
versionId: 'null',
isLast: false,
},
};
const versionParams = {
versioning: true,
versionId: null,
repairMaster: null,
};
return async.series([
next => {
// we start by creating a new version and master
versionId1 = generateVersionId(this.replicationGroupId);
params.versionId = versionId1;
params.objVal.versionId = versionId1;
versionParams.versionId = versionId1;
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, next);
},
next => {
// we create a second version of the same object (master is updated)
// params.objVal.versionId = 'version2';
// versionParams.versionId = null;
versionId2 = generateVersionId(this.replicationGroupId);
params.versionId = versionId2;
params.objVal.versionId = versionId2;
versionParams.versionId = versionId2;
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, next);
},
next => {
// deleting latest version
metadata.deleteObjectMD(BUCKET_NAME, params.objName, { versionId: versionId2 },
logger, next);
},
next => {
// latest version must be removed
metadata.getObjectMD(BUCKET_NAME, params.objName, { versionId: versionId2 }, logger, err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return next();
});
},
next => {
// master must be updated to contain first version data
metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.versionId, versionId1);
return next();
});
},
next => {
// one master and version in the db
getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 2);
return next();
});
},
], done);
});
it(`Should fail when version id non existent ${variation.vFormat}`, done => {
const versionId = generateVersionId(this.replicationGroupId);
const objName = 'test-object';
metadata.deleteObjectMD(BUCKET_NAME, objName, { versionId }, logger, err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return done();
});
});
itOnlyInV1(`Should create master when delete marker removed ${variation.vFormat}`, done => {
const objVal = {
key: 'test-object',
isDeleteMarker: false,
};
const params = {
versioning: true,
versionId: null,
repairMaster: null,
};
let firstVersionVersionId;
let deleteMarkerVersionId;
async.series([
// We first create a new version and master
next => metadata.putObjectMD(BUCKET_NAME, 'test-object', objVal, params, logger, (err, res) => {
if (err) {
return next(err);
}
firstVersionVersionId = JSON.parse(res).versionId;
return next();
}),
// putting a delete marker as last version
next => {
objVal.isDeleteMarker = true;
params.versionId = null;
return metadata.putObjectMD(BUCKET_NAME, 'test-object', objVal, params, logger, (err, res) => {
if (err) {
return next(err);
}
deleteMarkerVersionId = JSON.parse(res).versionId;
return next();
});
},
next => {
// using fake clock to override the setTimeout used by the repair
const clock = sinon.useFakeTimers();
return metadata.deleteObjectMD(BUCKET_NAME, 'test-object', { versionId: deleteMarkerVersionId },
logger, () => {
// running the repair callback
clock.runAll();
clock.restore();
return next();
});
},
// waiting for the repair callback to finish
next => setTimeout(next, 100),
// master should be created
next => {
getObject('\x7fMtest-object', (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, 'test-object');
assert.strictEqual(object.versionId, firstVersionVersionId);
assert.strictEqual(object.isDeleteMarker, false);
return next();
});
},
], done);
});
itOnlyInV1(`Should delete master when delete marker becomes last version ${variation.vFormat}`, done => {
const objVal = {
key: 'test-object',
isDeleteMarker: false,
};
const params = {
versioning: true,
versionId: null,
repairMaster: null,
};
let versionId;
async.series([
// We first create a new version and master
next => metadata.putObjectMD(BUCKET_NAME, 'test-object', objVal, params, logger, next),
// putting a delete marker as last version
next => {
objVal.isDeleteMarker = true;
params.versionId = null;
return metadata.putObjectMD(BUCKET_NAME, 'test-object', objVal, params, logger, next);
},
// putting new version on top of delete marker
next => {
objVal.isDeleteMarker = false;
return metadata.putObjectMD(BUCKET_NAME, 'test-object', objVal, params, logger, (err, res) => {
if (err) {
return next(err);
}
versionId = JSON.parse(res).versionId;
return next();
});
},
next => {
// using fake clock to override the setTimeout used by the repair
const clock = sinon.useFakeTimers();
return metadata.deleteObjectMD(BUCKET_NAME, 'test-object', { versionId },
logger, () => {
// running the repair callback
clock.runAll();
clock.restore();
return next();
});
},
// waiting for the repair callback to finish
next => setTimeout(next, 100),
// master must be deleted
next => {
getObject('\x7fMtest-object', err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return next();
});
},
], done);
});
});
});
});

View File

@ -1,283 +0,0 @@
const async = require('async');
const assert = require('assert');
const werelogs = require('werelogs');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
const { errors, versioning } = require('../../../../index');
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
const BucketInfo = require('../../../../lib/models/BucketInfo');
const MetadataWrapper =
require('../../../../lib/storage/metadata/MetadataWrapper');
const genVID = versioning.VersionID.generateVersionId;
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
const { formatMasterKey } = require('../../../../lib/storage/metadata/mongoclient/utils');
const IMPL_NAME = 'mongodb';
const DB_NAME = 'metadata';
const BUCKET_NAME = 'test-bucket';
const replicationGroupId = 'RG001';
const mongoserver = new MongoMemoryReplSet({
debug: false,
instanceOpts: [
{ port: 27019 },
],
replSet: {
name: 'rs0',
count: 1,
DB_NAME,
storageEngine: 'ephemeralForTest',
},
});
let uidCounter = 0;
function generateVersionId() {
return genVID(`${process.pid}.${uidCounter++}`,
replicationGroupId);
}
const variations = [
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0 },
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1 },
];
describe('MongoClientInterface::metadata.getObjectMD', () => {
let metadata;
let collection;
let versionId1;
let versionId2;
let params = {
objName: 'pfx1-test-object',
objVal: {
key: 'pfx1-test-object',
versionId: 'null',
},
};
function updateMasterObject(objName, versionId, objVal, vFormat, cb) {
const mKey = formatMasterKey(objName, vFormat);
collection.updateOne(
{
_id: mKey,
$or: [{
'value.versionId': {
$exists: false,
},
},
{
'value.versionId': {
$gt: versionId,
},
},
],
},
{
$set: { _id: mKey, value: objVal },
},
{ upsert: true },
err => {
if (err) {
return cb(err);
}
return cb(null);
});
}
beforeAll(done => {
mongoserver.waitUntilRunning().then(() => {
const opts = {
mongodb: {
replicaSetHosts: 'localhost:27019',
writeConcern: 'majority',
replicaSet: 'rs0',
readPreference: 'primary',
database: DB_NAME,
},
};
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
metadata.setup(done);
});
});
afterAll(done => {
async.series([
next => metadata.close(next),
next => mongoserver.stop()
.then(() => next())
.catch(next),
], done);
});
variations.forEach(variation => {
const itOnlyInV1 = variation.vFormat === 'v1' ? it : it.skip;
describe(`vFormat : ${variation.vFormat}`, () => {
beforeEach(done => {
const bucketMD = BucketInfo.fromObj({
_name: BUCKET_NAME,
_owner: 'testowner',
_ownerDisplayName: 'testdisplayname',
_creationDate: new Date().toJSON(),
_acl: {
Canned: 'private',
FULL_CONTROL: [],
WRITE: [],
WRITE_ACP: [],
READ: [],
READ_ACP: [],
},
_mdBucketModelVersion: 10,
_transient: false,
_deleted: false,
_serverSideEncryption: null,
_versioningConfiguration: null,
_locationConstraint: 'us-east-1',
_readLocationConstraint: null,
_cors: null,
_replicationConfiguration: null,
_lifecycleConfiguration: null,
_uid: '',
_isNFS: null,
ingestion: null,
});
const versionParams = {
versioning: true,
versionId: null,
repairMaster: null,
};
async.series([
next => {
metadata.client.defaultBucketKeyFormat = variation.vFormat;
return next();
},
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
if (err) {
return next(err);
}
collection = metadata.client.getCollection(BUCKET_NAME);
return next();
}),
next => {
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, (err, res) => {
if (err) {
return next(err);
}
versionId1 = JSON.parse(res).versionId;
return next(null);
});
},
next => {
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, (err, res) => {
if (err) {
return next(err);
}
versionId2 = JSON.parse(res).versionId;
return next(null);
});
},
], done);
});
afterEach(done => {
// reset params
params = {
objName: 'pfx1-test-object',
objVal: {
key: 'pfx1-test-object',
versionId: 'null',
},
};
metadata.deleteBucket(BUCKET_NAME, logger, done);
});
it(`Should return latest version of object ${variation.it}`, done =>
metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, params.objName);
assert.strictEqual(object.versionId, versionId2);
return done();
}));
it(`Should return the specified version of object ${variation.it}`, done =>
metadata.getObjectMD(BUCKET_NAME, params.objName, { versionId: versionId1 }, logger, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, params.objName);
assert.strictEqual(object.versionId, versionId1);
return done();
}));
it(`Should throw error when version is non-existent ${variation.it}`, done => {
const versionId = '1234567890';
return metadata.getObjectMD(BUCKET_NAME, params.objName, { versionId }, logger, (err, object) => {
assert.deepStrictEqual(object, undefined);
assert.deepStrictEqual(err, errors.NoSuchKey);
return done();
});
});
it(`Should throw error when object is non-existent ${variation.it}`, done => {
const objName = 'non-existent-object';
return metadata.getObjectMD(BUCKET_NAME, objName, null, logger, err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return done();
});
});
it(`Should throw error when bucket is non-existent ${variation.it}`, done => {
const bucketName = 'non-existent-bucket';
return metadata.getObjectMD(bucketName, params.objName, null, logger, (err, object) => {
assert.deepStrictEqual(object, undefined);
assert.deepStrictEqual(err, errors.NoSuchKey);
return done();
});
});
it(`Should return latest version when master is PHD ${variation.it}`, done => {
async.series([
next => {
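// in the v1 key format, master keys are stored under the '\x7fM' prefix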
const objectName = variation.vFormat === 'v0' ? 'pfx1-test-object' : '\x7fMpfx1-test-object';
// adding isPHD flag to master
const phdVersionId = generateVersionId();
params.objVal.versionId = phdVersionId;
params.objVal.isPHD = true;
updateMasterObject(objectName, phdVersionId, params.objVal,
variation.vFormat, next);
},
// Should return latest object version
next => metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, params.objName);
assert.strictEqual(object.versionId, versionId2);
delete params.objVal.isPHD;
return next();
}),
], done);
});
itOnlyInV1(`Should return last version when master is deleted ${variation.vFormat}`, done => {
const versioningParams = {
versioning: true,
versionId: null,
repairMaster: null,
};
async.series([
// putting a delete marker as last version
next => {
params.versionId = null;
params.objVal.isDeleteMarker = true;
return metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal, versioningParams,
logger, next);
},
next => metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, params.objName);
assert.strictEqual(object.isDeleteMarker, true);
params.objVal.isDeleteMarker = null;
return next();
}),
], done);
});
});
});
});

View File

@ -1,412 +0,0 @@
const async = require('async');
const assert = require('assert');
const werelogs = require('werelogs');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
const BucketInfo = require('../../../../lib/models/BucketInfo');
const MetadataWrapper =
require('../../../../lib/storage/metadata/MetadataWrapper');
const { versioning } = require('../../../../index');
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
const IMPL_NAME = 'mongodb';
const DB_NAME = 'metadata';
const BUCKET_NAME = 'test-bucket';
const mongoserver = new MongoMemoryReplSet({
debug: false,
instanceOpts: [
{ port: 27020 },
],
replSet: {
name: 'rs0',
count: 1,
DB_NAME,
storageEngine: 'ephemeralForTest',
},
});
const variations = [
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0 },
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1 },
];
describe('MongoClientInterface::metadata.listObject', () => {
let metadata;
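// put `versionNb` successive versions of the same object, one after the other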
function putBulkObjectVersions(bucketName, objName, objVal, params, versionNb, cb) {
let count = 0;
async.whilst(
() => count < versionNb,
cbIterator => {
count++;
// eslint-disable-next-line
return metadata.putObjectMD(bucketName, objName, objVal, params,
logger, cbIterator);
},
err => {
if (err) {
return cb(err);
}
return cb(null);
},
);
}
beforeAll(done => {
mongoserver.waitUntilRunning().then(() => {
const opts = {
mongodb: {
replicaSetHosts: 'localhost:27020',
writeConcern: 'majority',
replicaSet: 'rs0',
readPreference: 'primary',
database: DB_NAME,
},
};
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
metadata.setup(done);
});
});
afterAll(done => {
async.series([
next => metadata.close(next),
next => mongoserver.stop()
.then(() => next())
.catch(next),
], done);
});
variations.forEach(variation => {
beforeEach(done => {
const bucketMD = BucketInfo.fromObj({
_name: BUCKET_NAME,
_owner: 'testowner',
_ownerDisplayName: 'testdisplayname',
_creationDate: new Date().toJSON(),
_acl: {
Canned: 'private',
FULL_CONTROL: [],
WRITE: [],
WRITE_ACP: [],
READ: [],
READ_ACP: [],
},
_mdBucketModelVersion: 10,
_transient: false,
_deleted: false,
_serverSideEncryption: null,
_versioningConfiguration: null,
_locationConstraint: 'us-east-1',
_readLocationConstraint: null,
_cors: null,
_replicationConfiguration: null,
_lifecycleConfiguration: null,
_uid: '',
_isNFS: null,
ingestion: null,
});
const versionParams = {
versioning: true,
versionId: null,
repairMaster: null,
};
async.series([
next => {
metadata.client.defaultBucketKeyFormat = variation.vFormat;
return next();
},
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
if (err) {
return next(err);
}
return next();
}),
next => {
const params = {
objName: 'pfx1-test-object',
objVal: {
key: 'pfx1-test-object',
versionId: 'null',
},
nbVersions: 5,
};
putBulkObjectVersions(BUCKET_NAME, params.objName, params.objVal, versionParams,
params.nbVersions, next);
},
next => {
const params = {
objName: 'pfx2-test-object',
objVal: {
key: 'pfx2-test-object',
versionId: 'null',
},
nbVersions: 5,
};
putBulkObjectVersions(BUCKET_NAME, params.objName, params.objVal, versionParams,
params.nbVersions, next);
},
next => {
const params = {
objName: 'pfx3-test-object',
objVal: {
key: 'pfx3-test-object',
versionId: 'null',
},
nbVersions: 5,
};
putBulkObjectVersions(BUCKET_NAME, params.objName, params.objVal, versionParams,
params.nbVersions, next);
},
], done);
});
afterEach(done => {
metadata.deleteBucket(BUCKET_NAME, logger, done);
});
it(`Should list master versions of objects ${variation.it}`, done => {
const params = {
listingType: 'DelimiterMaster',
maxKeys: 100,
};
return metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.Contents.length, 3);
assert.strictEqual(data.Contents[0].key, 'pfx1-test-object');
assert.strictEqual(data.Contents[1].key, 'pfx2-test-object');
assert.strictEqual(data.Contents[2].key, 'pfx3-test-object');
return done();
});
});
it(`Should truncate list of master versions of objects ${variation.it}`, done => {
const params = {
listingType: 'DelimiterMaster',
maxKeys: 2,
};
return metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.Contents.length, 2);
assert.strictEqual(data.Contents[0].key, 'pfx1-test-object');
assert.strictEqual(data.Contents[1].key, 'pfx2-test-object');
return done();
});
});
it(`Should list master versions of objects that start with prefix ${variation.it}`, done => {
const bucketName = BUCKET_NAME;
const params = {
listingType: 'DelimiterMaster',
maxKeys: 100,
prefix: 'pfx2',
};
return metadata.listObject(bucketName, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.Contents.length, 1);
assert.strictEqual(data.Contents[0].key, 'pfx2-test-object');
return done();
});
});
it(`Should return empty results when bucket is non-existent (master) ${variation.it}`, done => {
const bucketName = 'non-existent-bucket';
const params = {
listingType: 'DelimiterMaster',
maxKeys: 100,
};
return metadata.listObject(bucketName, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert(data);
assert.strictEqual(data.Contents.length, 0);
return done();
});
});
it(`Should list all versions of objects ${variation.it}`, done => {
const bucketName = BUCKET_NAME;
const params = {
listingType: 'DelimiterVersions',
maxKeys: 1000,
};
const versionsPerKey = {};
return metadata.listObject(bucketName, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.Versions.length, 15);
data.Versions.forEach(version => {
versionsPerKey[version.key] = (versionsPerKey[version.key] || 0) + 1;
});
assert.strictEqual(versionsPerKey['pfx1-test-object'], 5);
assert.strictEqual(versionsPerKey['pfx2-test-object'], 5);
assert.strictEqual(versionsPerKey['pfx3-test-object'], 5);
return done();
});
});
it(`Should truncate list of versions of objects ${variation.it}`, done => {
const params = {
listingType: 'DelimiterVersions',
maxKeys: 5,
};
const versionsPerKey = {};
return metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.Versions.length, 5);
data.Versions.forEach(version => {
versionsPerKey[version.key] = (versionsPerKey[version.key] || 0) + 1;
});
assert.strictEqual(versionsPerKey['pfx1-test-object'], 5);
return done();
});
});
it(`Should list versions of objects that start with prefix ${variation.it}`, done => {
const params = {
listingType: 'DelimiterVersions',
maxKeys: 100,
prefix: 'pfx2',
};
const versionsPerKey = {};
return metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.Versions.length, 5);
data.Versions.forEach(version => {
versionsPerKey[version.key] = (versionsPerKey[version.key] || 0) + 1;
});
assert.strictEqual(versionsPerKey['pfx2-test-object'], 5);
return done();
});
});
it(`Should return empty results when bucket is non-existent (version) ${variation.it}`, done => {
const bucketName = 'non-existent-bucket';
const params = {
listingType: 'DelimiterVersions',
maxKeys: 100,
};
return metadata.listObject(bucketName, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert(data);
assert.strictEqual(data.Versions.length, 0);
return done();
});
});
it(`should check entire list with pagination (version) ${variation.it}`, done => {
const versionsPerKey = {};
const bucketName = BUCKET_NAME;
const get = (maxKeys, keyMarker, versionIdMarker, cb) => metadata.listObject(bucketName, {
listingType: 'DelimiterVersions',
maxKeys,
keyMarker,
versionIdMarker,
}, logger, (err, res) => {
if (err) {
return cb(err);
}
res.Versions.forEach(version => {
versionsPerKey[version.key] = (versionsPerKey[version.key] || 0) + 1;
});
if (res.IsTruncated) {
return get(maxKeys, res.NextKeyMarker, res.NextVersionIdMarker, cb);
}
return cb(null);
});
return get(3, null, null, err => {
assert.deepStrictEqual(err, null);
assert.strictEqual(Object.keys(versionsPerKey).length, 3);
assert.strictEqual(versionsPerKey['pfx1-test-object'], 5);
assert.strictEqual(versionsPerKey['pfx2-test-object'], 5);
assert.strictEqual(versionsPerKey['pfx3-test-object'], 5);
done();
});
});
it(`should not list phd master key when listing masters ${variation.it}`, done => {
const objVal = {
key: 'pfx1-test-object',
versionId: 'null',
};
const versionParams = {
versioning: true,
};
const params = {
listingType: 'DelimiterMaster',
prefix: 'pfx1',
};
let versionId;
let lastVersionId;
async.series([
next => metadata.putObjectMD(BUCKET_NAME, 'pfx1-test-object', objVal, versionParams,
logger, (err, res) => {
if (err) {
return next(err);
}
versionId = JSON.parse(res).versionId;
return next(null);
}),
next => metadata.putObjectMD(BUCKET_NAME, 'pfx1-test-object', objVal, versionParams,
logger, (err, res) => {
if (err) {
return next(err);
}
lastVersionId = JSON.parse(res).versionId;
return next(null);
}),
// when deleting the last version of an object a PHD master is created
// and kept for 15s before it's repaired
next => metadata.deleteObjectMD(BUCKET_NAME, 'pfx1-test-object', { versionId: lastVersionId },
logger, next),
next => metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.Contents[0].value.VersionId, versionId);
return next();
}),
], done);
});
it(`should not list phd master key when listing versions ${variation.it}`, done => {
const objVal = {
key: 'pfx1-test-object',
versionId: 'null',
};
const versionParams = {
versioning: true,
};
const params = {
listingType: 'DelimiterVersions',
prefix: 'pfx1',
};
let lastVersionId;
let versionIds;
async.series([
next => metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.Versions.length, 5);
versionIds = data.Versions.map(version => version.VersionId);
return next();
}),
next => metadata.putObjectMD(BUCKET_NAME, 'pfx1-test-object', objVal, versionParams,
logger, (err, res) => {
if (err) {
return next(err);
}
lastVersionId = JSON.parse(res).versionId;
return next(null);
}),
// when deleting the last version of an object a PHD master is created
// and kept for 15s before it's repaired
next => metadata.deleteObjectMD(BUCKET_NAME, 'pfx1-test-object', { versionId: lastVersionId },
logger, next),
next => metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
const newVersionIds = data.Versions.map(version => version.VersionId);
assert.strictEqual(data.Versions.length, 5);
assert(versionIds.every(version => newVersionIds.includes(version)));
return next();
}),
], done);
});
});
});

View File

@ -1,429 +0,0 @@
const async = require('async');
const assert = require('assert');
const werelogs = require('werelogs');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
const { errors, versioning } = require('../../../../index');
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
const BucketInfo = require('../../../../lib/models/BucketInfo');
const MetadataWrapper =
require('../../../../lib/storage/metadata/MetadataWrapper');
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
const IMPL_NAME = 'mongodb';
const DB_NAME = 'metadata';
const BUCKET_NAME = 'test-bucket';
const OBJECT_NAME = 'test-object';
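// fixed, pre-generated version ID reused across these tests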
const VERSION_ID = '98451712418844999999RG001 22019.0';
const mongoserver = new MongoMemoryReplSet({
debug: false,
instanceOpts: [
{ port: 27021 },
],
replSet: {
name: 'rs0',
count: 1,
DB_NAME,
storageEngine: 'ephemeralForTest',
},
});
const variations = [
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0 },
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1 },
];
describe('MongoClientInterface:metadata.putObjectMD', () => {
let metadata;
let collection;
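// read a raw document from the bucket collection to check what was actually written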
function getObject(key, cb) {
collection.findOne({
_id: key,
}, {}, (err, doc) => {
if (err) {
return cb(err);
}
if (!doc) {
return cb(errors.NoSuchKey);
}
return cb(null, doc.value);
});
}
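// count every document in the bucket collection (master + version keys)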
function getObjectCount(cb) {
collection.countDocuments((err, count) => {
if (err) {
return cb(err);
}
return cb(null, count);
});
}
beforeAll(done => {
mongoserver.waitUntilRunning().then(() => {
const opts = {
mongodb: {
replicaSetHosts: 'localhost:27021',
writeConcern: 'majority',
replicaSet: 'rs0',
readPreference: 'primary',
database: DB_NAME,
},
};
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
metadata.setup(done);
});
});
afterAll(done => {
async.series([
next => metadata.close(next),
next => mongoserver.stop()
.then(() => next())
.catch(next),
], done);
});
variations.forEach(variation => {
const itOnlyInV1 = variation.vFormat === 'v1' ? it : it.skip;
describe(`vFormat : ${variation.vFormat}`, () => {
beforeEach(done => {
const bucketMD = BucketInfo.fromObj({
_name: BUCKET_NAME,
_owner: 'testowner',
_ownerDisplayName: 'testdisplayname',
_creationDate: new Date().toJSON(),
_acl: {
Canned: 'private',
FULL_CONTROL: [],
WRITE: [],
WRITE_ACP: [],
READ: [],
READ_ACP: [],
},
_mdBucketModelVersion: 10,
_transient: false,
_deleted: false,
_serverSideEncryption: null,
_versioningConfiguration: null,
_locationConstraint: 'us-east-1',
_readLocationConstraint: null,
_cors: null,
_replicationConfiguration: null,
_lifecycleConfiguration: null,
_uid: '',
_isNFS: null,
ingestion: null,
});
async.series([
next => {
metadata.client.defaultBucketKeyFormat = variation.vFormat;
return next();
},
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
if (err) {
return next(err);
}
collection = metadata.client.getCollection(BUCKET_NAME);
return next();
}),
], done);
});
afterEach(done => {
metadata.deleteBucket(BUCKET_NAME, logger, done);
});
it(`Should put a new non-versioned object ${variation.it}`, done => {
const objVal = {
key: OBJECT_NAME,
versionId: 'null',
updated: false,
};
const params = {
versioning: null,
versionId: null,
repairMaster: null,
};
async.series([
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
next => {
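// master key is the bare object name in v0, '\x7fM'-prefixed in v1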
const key = variation.vFormat === 'v0' ? 'test-object' : '\x7fMtest-object';
getObject(key, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, OBJECT_NAME);
return next();
});
},
// When versioning is not active, only one document is created (master)
next => getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 1);
return next();
}),
], done);
});
it(`Should update the metadata ${variation.it}`, done => {
const objVal = {
key: OBJECT_NAME,
versionId: 'null',
updated: false,
};
const params = {
versioning: null,
versionId: null,
repairMaster: null,
};
async.series([
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
next => {
objVal.updated = true;
metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next);
},
// object metadata must be updated
next => {
const key = variation.vFormat === 'v0' ? 'test-object' : '\x7fMtest-object';
getObject(key, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, OBJECT_NAME);
assert.strictEqual(object.updated, true);
return next();
});
},
// Only a master version should be created
next => getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 1);
return next();
}),
], done);
});
it(`Should put versioned object with the specified versionId ${variation.it}`, done => {
const objVal = {
key: OBJECT_NAME,
versionId: VERSION_ID,
updated: false,
};
const params = {
versioning: true,
versionId: VERSION_ID,
repairMaster: null,
};
async.series([
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
// checking if metadata corresponds to what was given to the function
next => {
const key = variation.vFormat === 'v0' ? 'test-object' : '\x7fMtest-object';
getObject(key, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, OBJECT_NAME);
assert.strictEqual(object.versionId, VERSION_ID);
return next();
});
},
// We'll have one master and one version
next => getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 2);
return next();
}),
], done);
});
it(`Should put new version and update master ${variation.it}`, done => {
const objVal = {
key: OBJECT_NAME,
versionId: VERSION_ID,
updated: false,
};
const params = {
versioning: true,
versionId: null,
repairMaster: null,
};
let versionId = null;
async.series([
// We first create a master and a version
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
versionId = JSON.parse(data).versionId;
return next();
}),
// We put another version of the object
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
// Master must be updated
next => {
const key = variation.vFormat === 'v0' ? 'test-object' : '\x7fMtest-object';
getObject(key, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, OBJECT_NAME);
assert.notStrictEqual(object.versionId, versionId);
return next();
});
},
// we'll have two versions and one master
next => getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 3);
return next();
}),
], done);
});
it(`Should update master when versioning is disabled ${variation.it}`, done => {
const objVal = {
key: OBJECT_NAME,
versionId: VERSION_ID,
updated: false,
};
const params = {
versioning: true,
versionId: null,
repairMaster: null,
};
let versionId = null;
async.series([
// We first create a new version and master
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
versionId = JSON.parse(data).versionId;
return next();
}),
next => {
// Disabling versioning and putting a new version
params.versioning = false;
params.versionId = '';
return metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next);
},
// Master must be updated
next => {
const key = variation.vFormat === 'v0' ? 'test-object' : '\x7fMtest-object';
getObject(key, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, OBJECT_NAME);
assert.notStrictEqual(object.versionId, versionId);
return next();
});
},
// The second put shouldn't create a new version
next => getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 2);
return next();
}),
], done);
});
it(`Should update latest version and repair master ${variation.it}`, done => {
const objVal = {
key: OBJECT_NAME,
versionId: VERSION_ID,
updated: false,
};
const params = {
versioning: true,
versionId: VERSION_ID,
repairMaster: null,
};
async.series([
// We first create a new version and master
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
next => {
// Updating the version and repairing master
params.repairMaster = true;
objVal.updated = true;
return metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next);
},
// Master must be updated
next => {
const key = variation.vFormat === 'v0' ? 'test-object' : '\x7fMtest-object';
getObject(key, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, OBJECT_NAME);
assert.strictEqual(object.versionId, VERSION_ID);
assert.strictEqual(object.updated, true);
return next();
});
},
// The second put shouldn't create a new version
next => getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 2);
return next();
}),
], done);
});
itOnlyInV1(`Should delete master when last version is a delete marker ${variation.it}`, done => {
const objVal = {
key: OBJECT_NAME,
versionId: VERSION_ID,
updated: false,
isDeleteMarker: false,
};
const params = {
versioning: true,
versionId: VERSION_ID,
repairMaster: null,
};
async.series([
// We first create a new version and master
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
// putting a delete marker as last version
next => {
objVal.isDeleteMarker = true;
params.versionId = null;
return metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next);
},
// master must be deleted
next => getObject('\x7fMtest-object', err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return next();
}),
], done);
});
itOnlyInV1(`Should create master when new version is put on top of delete marker ${variation.it}`, done => {
const objVal = {
key: OBJECT_NAME,
versionId: VERSION_ID,
updated: false,
isDeleteMarker: false,
};
const params = {
versioning: true,
versionId: VERSION_ID,
repairMaster: null,
};
async.series([
// We first create a new version and master
next => metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next),
// putting a delete marker as last version
next => {
objVal.isDeleteMarker = true;
params.versionId = null;
return metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next);
},
// We put a new version on top of delete marker
next => {
objVal.isDeleteMarker = false;
objVal.updated = true;
return metadata.putObjectMD(BUCKET_NAME, OBJECT_NAME, objVal, params, logger, next);
},
// master must be created
next => getObject('\x7fMtest-object', (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, OBJECT_NAME);
assert.strictEqual(object.updated, true);
assert.strictEqual(object.isDeleteMarker, false);
return next();
}),
], done);
});
});
});
});

View File

@ -1,330 +0,0 @@
const async = require('async');
const assert = require('assert');
const werelogs = require('werelogs');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
const { errors, versioning } = require('../../../../index');
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
const BucketInfo = require('../../../../lib/models/BucketInfo');
const MetadataWrapper =
require('../../../../lib/storage/metadata/MetadataWrapper');
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
const IMPL_NAME = 'mongodb';
const DB_NAME = 'metadata';
const BUCKET_NAME = 'testbucket';
const mongoserver = new MongoMemoryReplSet({
debug: false,
instanceOpts: [
{ port: 27022 },
],
replSet: {
name: 'rs0',
count: 1,
DB_NAME,
storageEngine: 'ephemeralForTest',
},
});
describe('MongoClientInterface:withCond', () => {
let metadata;
const variations = [
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0 },
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1 },
];
beforeAll(done => {
mongoserver.waitUntilRunning().then(() => {
const opts = {
mongodb: {
replicaSetHosts: 'localhost:27022',
writeConcern: 'majority',
replicaSet: 'rs0',
readPreference: 'primary',
database: DB_NAME,
},
};
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
metadata.setup(done);
});
});
afterAll(done => {
async.series([
next => metadata.close(next),
next => mongoserver.stop()
.then(() => next())
.catch(next),
], done);
});
variations.forEach(variation => {
describe('::putObjectWithCond', () => {
beforeEach(done => {
const bucketMD = BucketInfo.fromObj({
_name: BUCKET_NAME,
_owner: 'testowner',
_ownerDisplayName: 'testdisplayname',
_creationDate: new Date().toJSON(),
_acl: {
Canned: 'private',
FULL_CONTROL: [],
WRITE: [],
WRITE_ACP: [],
READ: [],
READ_ACP: [],
},
_mdBucketModelVersion: 10,
_transient: false,
_deleted: false,
_serverSideEncryption: null,
_versioningConfiguration: null,
_locationConstraint: 'us-east-1',
_readLocationConstraint: null,
_cors: null,
_replicationConfiguration: null,
_lifecycleConfiguration: null,
_uid: '',
_isNFS: null,
ingestion: null,
});
async.series([
next => {
metadata.client.defaultBucketKeyFormat = variation.vFormat;
return next();
},
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, next),
], done);
});
afterEach(done => {
metadata.deleteBucket(BUCKET_NAME, logger, done);
});
const tests = [
[
`should upsert object if an existing object does not exist ${variation.it}`,
{
initVal: null,
upsertVal: { value: { number: 42, string: 'forty-two' } },
conditions: { value: { number: 24 } },
expectedVal: { value: { number: 42, string: 'forty-two' } },
error: null,
},
],
[
`should not update an existing object if the conditions fail ${variation.it}`,
{
initVal: { value: { number: 0, string: 'zero' } },
upsertVal: { value: { number: 42, string: 'forty-two' } },
conditions: { value: { number: 24 } },
expectedVal: { value: { number: 0, string: 'zero' } },
error: errors.InternalError,
},
],
[
`should not update an existing object if the conditions fail ${variation.it}`,
{
initVal: { value: { number: 0, string: 'zero' } },
upsertVal: { value: { number: 42, string: 'forty-two' } },
conditions: { value: { string: { $eq: 'twenty-four' } } },
expectedVal: { value: { number: 0, string: 'zero' } },
error: errors.InternalError,
},
],
[
`should not update an existing object if the conditions fail ${variation.it}`,
{
initVal: { value: { number: 0, string: 'zero' } },
upsertVal: { value: { number: 42, string: 'forty-two' } },
conditions: {
value: {
string: { $eq: 'twenty-four' },
number: { $eq: 0 },
},
},
expectedVal: { value: { number: 0, string: 'zero' } },
error: errors.InternalError,
},
],
[
`should update an existing object if the conditions pass ${variation.it}`,
{
initVal: { value: { number: 24, string: 'twenty-four' } },
upsertVal: { value: { number: 42, string: 'forty-two' } },
conditions: { value: { number: 24 } },
expectedVal: { value: { number: 42, string: 'forty-two' } },
error: null,
},
],
[
`should update an existing object if the conditions pass ${variation.it}`,
{
initVal: { value: { number: 24, string: 'twenty-four' } },
upsertVal: { value: { number: 42, string: 'forty-two' } },
conditions: { value: { string: { $eq: 'twenty-four' } } },
expectedVal: { value: { number: 42, string: 'forty-two' } },
error: null,
},
],
[
`should update an existing object if the conditions pass ${variation.it}`,
{
initVal: { value: { number: 24, string: 'twenty-four' } },
upsertVal: { value: { number: 42, string: 'forty-two' } },
conditions: {
value: {
string: { $eq: 'twenty-four' },
number: { $eq: 24 },
},
},
expectedVal: { value: { number: 42, string: 'forty-two' } },
error: null,
},
],
];
tests.forEach(([msg, testCase]) => it(msg, done => {
const objectKey = 'testkey';
const {
initVal, upsertVal, conditions, expectedVal, error,
} = testCase;
const params = { conditions };
async.series([
next => {
if (!initVal) {
return next();
}
return metadata.putObjectMD(BUCKET_NAME, objectKey, initVal,
{}, logger, next);
},
next => metadata.putObjectWithCond(BUCKET_NAME, objectKey,
upsertVal, params, logger, err => {
if (error) {
assert.deepStrictEqual(err, error);
return next();
}
assert(!err);
return next();
}),
next => metadata.getObjectMD(BUCKET_NAME, objectKey, {}, logger,
(err, res) => {
assert(!err);
assert.deepStrictEqual(res, expectedVal);
next();
}),
], done);
}));
});
describe('::deleteObjectWithCond', () => {
const tests = [
[
`should return no such key if the object does not exist ${variation.it}`,
{
initVal: null,
conditions: { value: { number: 24 } },
expectedVal: null,
error: errors.NoSuchKey,
},
],
[
`should return no such key if the conditions fail ${variation.it}`,
{
initVal: { value: { number: 0, string: 'zero' } },
conditions: { value: { number: { $eq: 24 } } },
expectedVal: { value: { number: 0, string: 'zero' } },
error: errors.NoSuchKey,
},
],
[
`should return no such key if the conditions fail ${variation.it}`,
{
initVal: { value: { number: 0, string: 'zero' } },
conditions: { value: { string: 'twenty-four' } },
expectedVal: { value: { number: 0, string: 'zero' } },
error: errors.NoSuchKey,
},
],
[
`should return no such key if the conditions fail ${variation.it}`,
{
initVal: { value: { number: 0, string: 'zero' } },
conditions: {
value: {
string: 'twenty-four',
number: { $eq: 0 },
},
},
expectedVal: { value: { number: 0, string: 'zero' } },
error: errors.NoSuchKey,
},
],
[
`should successfully delete matched object ${variation.it}`,
{
initVal: { value: { number: 24, string: 'twenty-four' } },
conditions: { value: { number: 24 } },
expectedVal: null,
error: null,
},
],
[
`should successfully delete matched object ${variation.it}`,
{
initVal: { value: { number: 24, string: 'twenty-four' } },
conditions: { value: { string: { $eq: 'twenty-four' } } },
expectedVal: null,
error: null,
},
],
[
`should successfully delete matched object ${variation.it}`,
{
initVal: { value: { number: 24, string: 'twenty-four' } },
conditions: {
value: {
string: { $eq: 'twenty-four' },
number: { $eq: 24 },
},
},
expectedVal: null,
error: null,
},
],
];
tests.forEach(([msg, testCase]) => it(msg, done => {
const objectKey = 'testkey';
const { initVal, conditions, expectedVal, error } = testCase;
const params = { conditions };
async.series([
next => {
if (!initVal) {
return next();
}
return metadata.putObjectMD(BUCKET_NAME, objectKey, initVal,
{}, logger, next);
},
next => metadata.deleteObjectWithCond(BUCKET_NAME, objectKey,
params, logger, err => {
if (error) {
assert.deepStrictEqual(err, error);
return next();
}
assert(!err);
return next();
}),
next => metadata.getObjectMD(BUCKET_NAME, objectKey, {}, logger,
(err, res) => {
if (expectedVal) {
assert.deepStrictEqual(res, expectedVal);
} else {
assert.deepStrictEqual(err, errors.NoSuchKey);
}
return next();
}),
], done);
}));
});
});
});

View File

@ -1,319 +0,0 @@
'use strict'; // eslint-disable-line strict
const werelogs = require('werelogs');
const assert = require('assert');
const async = require('async');
const logger = new werelogs.Logger('MetadataProxyServer', 'debug', 'debug');
const MetadataWrapper =
require('../../../lib/storage/metadata/MetadataWrapper');
const BucketRoutes =
require('../../../lib/storage/metadata/proxy/BucketdRoutes');
const metadataWrapper = new MetadataWrapper('mem', {}, null, logger);
const { RequestDispatcher } = require('../../utils/mdProxyUtils');
const routes = new BucketRoutes(metadataWrapper, logger);
const dispatcher = new RequestDispatcher(routes);
const Bucket = 'test';
const bucketInfo = {
acl: {
Canned: 'private',
FULL_CONTROL: [],
WRITE: [],
WRITE_ACP: [],
READ: [],
READ_ACP: [],
},
name: Bucket,
owner: '9d8fe19a78974c56dceb2ea4a8f01ed0f5fecb9d29f80e9e3b84104e4a3ea520',
ownerDisplayName: 'anonymousCoward',
creationDate: '2018-06-04T17:45:42.592Z',
mdBucketModelVersion: 8,
transient: false,
deleted: false,
serverSideEncryption: null,
versioningConfiguration: null,
locationConstraint: 'us-east-1',
readLocationConstraint: 'us-east-1',
cors: null,
replicationConfiguration: null,
lifecycleConfiguration: null,
uid: 'fea97818-6a9a-11e8-9777-e311618cc5d4',
isNFS: null,
};
const objects = [
'aaa',
'bbb/xaa',
'bbb/xbb',
'bbb/xcc',
'ccc',
'ddd',
];
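// compute what a delimiter listing should return: strip the prefix, collapse
// keys sharing a delimiter into one common prefix, drop duplicates and empty entries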
function _getExpectedListing(prefix, objects) {
const filtered = objects.map(key => {
const deprefixed = key.slice(prefix.length);
return deprefixed.replace(/[/].*/, '/');
});
const keySet = {};
return filtered.filter(key => {
if (keySet[key]) {
return false;
}
if (key === '') {
return false;
}
keySet[key] = true;
return true;
});
}
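// build the proxy listing URL for a given prefix and marker, URL-encoding slashes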
function _listingURL(prefix, marker) {
const reSlash = /[/]/g;
const escapedPrefix = prefix.replace(reSlash, '%2F');
const escapedMarker = marker.replace(reSlash, '%2F');
return `/default/bucket/${Bucket}?delimiter=%2F&prefix=` +
`${escapedPrefix}&maxKeys=1&marker=${escapedMarker}`;
}
function _listObjects(prefix, objects, cb) {
const keys = _getExpectedListing(prefix, objects);
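// pair each expected key with the marker that should produce it;
// the first page is requested without a marker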
const markers = keys.slice(0);
markers.unshift(undefined);
const lastKey = keys[keys.length - 1];
const listing = keys.map((key, index) => ({
key,
marker: markers[index],
NextMarker: markers[index + 1],
IsTruncated: key !== lastKey,
isPrefix: key.endsWith('/'),
}));
async.mapLimit(listing, 5, (obj, next) => {
const currentMarker = obj.marker === undefined ? '' : obj.marker;
dispatcher.get(_listingURL(prefix, prefix + currentMarker),
(err, response, body) => {
if (err) {
return next(err);
}
if (obj.isPrefix) {
assert.strictEqual(body.Contents.length, 0);
assert.strictEqual(body.CommonPrefixes.length,
1);
assert.strictEqual(body.CommonPrefixes[0],
prefix + obj.key);
} else {
assert.strictEqual(body.Contents.length, 1);
assert.strictEqual(body.CommonPrefixes.length,
0);
assert.strictEqual(body.Contents[0].key,
prefix + obj.key);
}
assert.strictEqual(body.IsTruncated,
obj.IsTruncated);
if (body.IsTruncated) {
assert.strictEqual(body.NextMarker,
prefix + obj.NextMarker);
}
return next();
});
}, err => cb(err));
}
function _createObjects(objects, cb) {
async.mapLimit(objects, 5, (key, next) => {
dispatcher.post(`/default/bucket/${Bucket}/${key}`,
{ key }, next);
}, err => {
cb(err);
});
}
function _readObjects(objects, cb) {
async.mapLimit(objects, 5, (key, next) => {
dispatcher.get(`/default/bucket/${Bucket}/${key}`,
(err, response, body) => {
assert.deepStrictEqual(body.key, key);
next(err);
});
}, err => {
cb(err);
});
}
function _deleteObjects(objects, cb) {
async.mapLimit(objects, 5, (key, next) => {
dispatcher.delete(`/default/bucket/${Bucket}/${key}`,
err => next(err));
}, err => {
cb(err);
});
}
describe('Basic Metadata Proxy Server test',
() => {
jest.setTimeout(10000);
it('Should get the metadataInformation', done => {
dispatcher.get('/default/metadataInformation',
(err, response, body) => {
if (err) {
return done(err);
}
assert.deepStrictEqual(
body, { metadataVersion: 2 });
return done();
});
});
});
describe('Basic Metadata Proxy Server CRUD test', () => {
jest.setTimeout(10000);
beforeEach(done => {
dispatcher.post(`/default/bucket/${Bucket}`, bucketInfo,
done);
});
afterEach(done => {
dispatcher.delete(`/default/bucket/${Bucket}`, done);
});
it('Should get the bucket attributes', done => {
dispatcher.get(`/default/attributes/${Bucket}`,
(err, response, body) => {
if (err) {
return done(err);
}
assert.deepStrictEqual(body.name,
bucketInfo.name);
return done();
});
});
it('Should crud an object', done => {
async.waterfall([
next => dispatcher.post(`/default/bucket/${Bucket}/test1`,
{ foo: 'gabu' }, err => next(err)),
next => dispatcher.get(`/default/bucket/${Bucket}/test1`,
(err, response, body) => {
if (!err) {
assert.deepStrictEqual(body.foo,
'gabu');
}
// always invoke the callback so a failed GET does not hang the waterfall
next(err);
}),
next => dispatcher.post(`/default/bucket/${Bucket}/test1`,
{ foo: 'zome' }, err => next(err)),
next => dispatcher.get(`/default/bucket/${Bucket}/test1`,
(err, response, body) => {
if (!err) {
assert.deepStrictEqual(body.foo,
'zome');
}
next(err);
}),
next => dispatcher.delete(`/default/bucket/${Bucket}/test1`,
err => next(err)),
], err => done(err));
});
it('Should list objects', done => {
async.waterfall([
next => _createObjects(objects, next),
next => _readObjects(objects, next),
next => _listObjects('', objects, next),
next => _listObjects('bbb/', objects, next),
next => _deleteObjects(objects, next),
], err => {
done(err);
});
});
it('Should update bucket properties', done => {
dispatcher.get(
`/default/attributes/${Bucket}`, (err, response, body) => {
assert.strictEqual(err, null);
const bucketInfo = body;
const newOwnerDisplayName = 'divertedfrom';
bucketInfo.ownerDisplayName = newOwnerDisplayName;
dispatcher.post(
`/default/attributes/${Bucket}`, bucketInfo, err => {
assert.strictEqual(err, null);
dispatcher.get(
`/default/attributes/${Bucket}`,
(err, response, body) => {
assert.strictEqual(err, null);
const newBucketInfo = body;
assert.strictEqual(
newBucketInfo.ownerDisplayName,
newOwnerDisplayName);
done(null);
});
});
});
});
it('Should fail to list a non-existent bucket', done => {
dispatcher.get('/default/bucket/nonexisting',
(err, response) => {
assert.strictEqual(
response.responseHead.statusCode,
404);
done(err);
});
});
it('Should fail to get attributes from a non-existent bucket', done => {
dispatcher.get('/default/attributes/nonexisting',
(err, response) => {
assert.strictEqual(
response.responseHead.statusCode,
404);
done(err);
});
});
it('should succeed a health check', done => {
dispatcher.get('/_/healthcheck', (err, response, body) => {
if (err) {
return done(err);
}
const expectedResponse = {
memorybucket: {
code: 200,
message: 'OK',
},
};
assert.strictEqual(response.responseHead.statusCode, 200);
assert.deepStrictEqual(body, expectedResponse);
return done(err);
});
});
it('should work with parallel route', done => {
const objectName = 'theObj';
async.waterfall([
next => _createObjects([objectName], next),
next => {
dispatcher.get(
`/default/parallel/${Bucket}/${objectName}`,
(err, response, body) => {
if (err) {
return next(err);
}
assert.strictEqual(response.responseHead.statusCode,
200);
const bucketMD = JSON.parse(body.bucket);
const objectMD = JSON.parse(body.obj);
const expectedObjectMD = { key: objectName };
assert.deepStrictEqual(bucketMD.name,
bucketInfo.name);
assert.deepStrictEqual(objectMD, expectedObjectMD);
return next(err);
});
},
next => _deleteObjects([objectName], next),
], done);
});
});

View File

@ -1,318 +0,0 @@
'use strict'; // eslint-disable-line strict
const assert = require('assert');
const async = require('async');
const RedisClient = require('../../../lib/metrics/RedisClient');
const StatsModel = require('../../../lib/metrics/StatsModel');
// setup redis client
const config = {
host: '127.0.0.1',
port: 6379,
enableOfflineQueue: true,
};
const fakeLogger = {
trace: () => {},
error: () => {},
};
const redisClient = new RedisClient(config, fakeLogger);
// setup stats model
const STATS_INTERVAL = 300; // 5 minutes
const STATS_EXPIRY = 86400; // 24 hours
const statsModel = new StatsModel(redisClient, STATS_INTERVAL, STATS_EXPIRY);
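// pad the expected samples with zeroes up to a full sample window
// (STATS_EXPIRY / STATS_INTERVAL = 288 five-minute slots)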
function setExpectedStats(expected) {
return expected.concat(
Array((STATS_EXPIRY / STATS_INTERVAL) - expected.length).fill(0));
}
// Since many methods were overwritten, these tests should validate the changes
// made to the original methods
describe('StatsModel class', () => {
const id = 'arsenal-test';
const id2 = 'test-2';
const id3 = 'test-3';
afterEach(() => redisClient.clear(() => {}));
it('should convert the columns of a 2d array into rows and vice versa using _zip',
() => {
const arrays = [
[1, 2, 3],
[4, 5, 6],
[7, 8, 9],
];
const res = statsModel._zip(arrays);
const expected = [
[1, 4, 7],
[2, 5, 8],
[3, 6, 9],
];
assert.deepStrictEqual(res, expected);
});
it('_zip should return an empty array if given an invalid array', () => {
const arrays = [];
const res = statsModel._zip(arrays);
assert.deepStrictEqual(res, []);
});
it('_getCount should return an array of all valid integer values',
() => {
const res = statsModel._getCount([
[null, '1'],
[null, '2'],
[null, null],
]);
assert.deepStrictEqual(res, setExpectedStats([1, 2, 0]));
});
it('should correctly record a new request with the default increment of one',
done => {
async.series([
next => {
statsModel.reportNewRequest(id, (err, res) => {
assert.ifError(err);
const expected = [[null, 1], [null, 1]];
assert.deepStrictEqual(res, expected);
next();
});
},
next => {
statsModel.reportNewRequest(id, (err, res) => {
assert.ifError(err);
const expected = [[null, 2], [null, 1]];
assert.deepStrictEqual(res, expected);
next();
});
},
], done);
});
it('should record new requests by specified increment amounts', done => {
function noop() {}
async.series([
next => {
statsModel.reportNewRequest(id, 9);
statsModel.getStats(fakeLogger, id, (err, res) => {
assert.ifError(err);
assert.deepStrictEqual(res.requests, setExpectedStats([9]));
next();
});
},
next => {
statsModel.reportNewRequest(id);
statsModel.getStats(fakeLogger, id, (err, res) => {
assert.ifError(err);
assert.deepStrictEqual(res.requests,
setExpectedStats([10]));
next();
});
},
next => {
statsModel.reportNewRequest(id, noop);
statsModel.getStats(fakeLogger, id, (err, res) => {
assert.ifError(err);
assert.deepStrictEqual(res.requests,
setExpectedStats([11]));
next();
});
},
], done);
});
it('should correctly record a 500 on the server', done => {
statsModel.report500(id, (err, res) => {
assert.ifError(err);
const expected = [[null, 1], [null, 1]];
assert.deepStrictEqual(res, expected);
done();
});
});
it('should respond back with total requests as an array', done => {
async.series([
next => {
statsModel.reportNewRequest(id, err => {
assert.ifError(err);
next();
});
},
next => {
statsModel.report500(id, err => {
assert.ifError(err);
next();
});
},
next => {
statsModel.getStats(fakeLogger, id, (err, res) => {
assert.ifError(err);
const expected = {
'requests': setExpectedStats([1]),
'500s': setExpectedStats([1]),
'sampleDuration': STATS_EXPIRY,
};
assert.deepStrictEqual(res, expected);
next();
});
},
], done);
});
it('should not crash on empty results', done => {
async.series([
next => {
statsModel.getStats(fakeLogger, id, (err, res) => {
assert.ifError(err);
const expected = {
'requests': setExpectedStats([]),
'500s': setExpectedStats([]),
'sampleDuration': STATS_EXPIRY,
};
assert.deepStrictEqual(res, expected);
next();
});
},
next => {
statsModel.getAllStats(fakeLogger, id, (err, res) => {
assert.ifError(err);
const expected = {
'requests': setExpectedStats([]),
'500s': setExpectedStats([]),
'sampleDuration': STATS_EXPIRY,
};
assert.deepStrictEqual(res, expected);
next();
});
},
], done);
});
it('should return a zero-filled array if no ids are passed to getAllStats',
done => {
statsModel.getAllStats(fakeLogger, [], (err, res) => {
assert.ifError(err);
assert.deepStrictEqual(res.requests, setExpectedStats([]));
assert.deepStrictEqual(res['500s'], setExpectedStats([]));
done();
});
});
it('should get accurately reported data for given id from getAllStats',
done => {
statsModel.reportNewRequest(id, 9);
statsModel.reportNewRequest(id2, 2);
statsModel.reportNewRequest(id3, 3);
statsModel.report500(id);
async.series([
next => {
statsModel.getAllStats(fakeLogger, [id], (err, res) => {
assert.ifError(err);
assert.equal(res.requests[0], 9);
assert.equal(res['500s'][0], 1);
next();
});
},
next => {
statsModel.getAllStats(fakeLogger, [id, id2, id3],
(err, res) => {
assert.ifError(err);
assert.equal(res.requests[0], 14);
assert.deepStrictEqual(res.requests,
setExpectedStats([14]));
next();
});
},
], done);
});
it('should normalize to the nearest hour using normalizeTimestampByHour',
() => {
const date = new Date('2018-09-13T23:30:59.195Z');
const newDate = new Date(statsModel.normalizeTimestampByHour(date));
assert.strictEqual(date.getHours(), newDate.getHours());
assert.strictEqual(newDate.getMinutes(), 0);
assert.strictEqual(newDate.getSeconds(), 0);
assert.strictEqual(newDate.getMilliseconds(), 0);
});
it('should get previous hour using _getDatePreviousHour', () => {
const date = new Date('2018-09-13T23:30:59.195Z');
const newDate = statsModel._getDatePreviousHour(new Date(date));
const millisecondsInOneHour = 3600000;
assert.strictEqual(date - newDate, millisecondsInOneHour);
});
it('should get an array of hourly timestamps using getSortedSetHours',
() => {
const epoch = 1536882476501;
const millisecondsInOneHour = 3600000;
const expected = [];
let dateInMilliseconds = statsModel.normalizeTimestampByHour(
new Date(epoch));
for (let i = 0; i < 24; i++) {
expected.push(dateInMilliseconds);
dateInMilliseconds -= millisecondsInOneHour;
}
const res = statsModel.getSortedSetHours(epoch);
assert.deepStrictEqual(res, expected);
});
it('should apply TTL on a new sorted set using addToSortedSet', done => {
const key = 'a-test-key';
const score = 100;
const value = 'a-value';
const now = Date.now();
const nearestHour = statsModel.normalizeTimestampByHour(new Date(now));
statsModel.addToSortedSet(key, score, value, (err, res) => {
assert.ifError(err);
// check both a "zadd" and "expire" occurred
assert.equal(res, 1);
redisClient.ttl(key, (err, res) => {
assert.ifError(err);
// assert this new set has a ttl applied
assert(res > 0);
const adjustmentSecs = now - nearestHour;
const msInADay = 24 * 60 * 60 * 1000;
const msInAnHour = 60 * 60 * 1000;
const upperLimitSecs =
Math.ceil((msInADay - adjustmentSecs) / 1000);
const lowerLimitSecs =
Math.floor((msInADay - adjustmentSecs - msInAnHour) / 1000);
// assert new ttl is between 23 and 24 hours adjusted by time
// elapsed since normalized hourly time
assert(res >= lowerLimitSecs);
assert(res <= upperLimitSecs);
done();
});
});
});
});

View File

@ -1,326 +0,0 @@
const assert = require('assert');
const ChainBackend = require('../../../lib/auth/auth').backends.chainBackend;
const BaseBackend = require('../../../lib/auth/auth').backends.baseBackend;
const errors = require('../../../lib/errors');
const testError = new Error('backend error');
const backendWithAllMethods = {
verifySignatureV2: () => {},
verifySignatureV4: () => {},
getCanonicalIds: () => {},
getEmailAddresses: () => {},
checkPolicies: () => {},
healthcheck: () => {},
};
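// clone the fully-featured backend and remove one method to exercise constructor validation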
function getBackendWithMissingMethod(methodName) {
const backend = Object.assign({}, backendWithAllMethods);
delete backend[methodName];
return backend;
}
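// stub backend whose auth methods all call back with a fixed (error, result) pair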
class TestBackend extends BaseBackend {
constructor(service, error, result) {
super(service);
this._error = error;
this._result = result;
}
verifySignatureV2(stringToSign, signatureFromRequest, accessKey, options, callback) {
return callback(this._error, this._result);
}
verifySignatureV4(stringToSign, signatureFromRequest, accessKey, region, scopeDate, options, callback) {
return callback(this._error, this._result);
}
getCanonicalIds(emailAddresses, options, callback) {
return callback(this._error, this._result);
}
getEmailAddresses(canonicalIDs, options, callback) {
return callback(this._error, this._result);
}
checkPolicies(requestContextParams, userArn, options, callback) {
return callback(this._error, this._result);
}
healthcheck(reqUid, callback) {
return callback(this._error, this._result);
}
}
describe('Auth Backend: Chain Backend', () => {
[
['should throw an error if client list is not an array', null],
['should throw an error if client list empty', []],
['should throw an error if a client is missing the verifySignatureV2 method', [
new TestBackend(),
getBackendWithMissingMethod('verifySignatureV2'),
]],
['should throw an error if a client is missing the verifySignatureV4 method', [
new TestBackend(),
getBackendWithMissingMethod('verifySignatureV4'),
]],
['should throw an error if a client is missing the getCanonicalIds method', [
new TestBackend(),
getBackendWithMissingMethod('getCanonicalIds'),
]],
['should throw an error if a client is missing the getEmailAddresses method', [
new TestBackend(),
getBackendWithMissingMethod('getEmailAddresses'),
]],
['should throw an error if a client is missing the checkPolicies method', [
new TestBackend(),
getBackendWithMissingMethod('checkPolicies'),
]],
['should throw an error if a client is missing the healthcheck method', [
new TestBackend(),
getBackendWithMissingMethod('healthcheck'),
]],
].forEach(([msg, input]) => it(msg, () => {
assert.throws(() => {
new ChainBackend('chain', input); // eslint-disable-line no-new
});
}));
[
// function name, function args
['verifySignatureV2', [null, null, null, null]],
['verifySignatureV4', [null, null, null, null, null, null]],
].forEach(([fn, fnArgs]) =>
describe(`::${fn}`, () => {
it('should return an error if none of the clients returns a result', done => {
const backend = new ChainBackend('chain', [
new TestBackend('test1', testError, null),
new TestBackend('test2', testError, null),
new TestBackend('test3', testError, null),
]);
backend[fn](...fnArgs, err => {
assert.deepStrictEqual(err, testError);
done();
});
});
[
[
'should return result of the first successful client (multiple successful clients)',
'expectedResult',
// backend constructor args
[
['test1', null, 'expectedResult'],
['test2', null, 'test2'],
['test3', testError, null],
],
],
[
'should return result of successful client',
'expectedResult',
// backend constructor args
[
['test1', testError, null],
['test2', null, 'expectedResult'],
['test3', testError, null],
],
],
[
'should return result of successful client',
'expectedResult',
// backend constructor args
[
['test1', testError, null],
['test2', testError, null],
['test3', null, 'expectedResult'],
],
],
].forEach(([msg, expected, backendArgs]) => {
it(msg, done => {
const backend = new ChainBackend('chain',
backendArgs.map((args) => new TestBackend(...args)));
backend[fn](...fnArgs, (err, res) => {
assert.ifError(err);
assert.strictEqual(res, expected);
done();
});
});
});
}));
[
// function name, function args
['getCanonicalIds', [null, null]],
['getEmailAddresses', [null, null]],
].forEach(([fn, fnArgs]) =>
describe(`::${fn}`, () => {
it('should return an error if any of the clients fails', done => {
const backend = new ChainBackend('chain', [
new TestBackend('test1', null, { message: { body: { test1: 'aaa' } } }),
new TestBackend('test2', testError, null),
new TestBackend('test3', null, { message: { body: { test2: 'bbb' } } }),
]);
backend[fn](...fnArgs, err => {
assert.deepStrictEqual(err, testError);
done();
});
});
it('should merge results from clients into a single response object', done => {
const backend = new ChainBackend('chain', [
new TestBackend('test1', null, { message: { body: { test1: 'aaa' } } }),
new TestBackend('test2', null, { message: { body: { test2: 'bbb' } } }),
]);
backend[fn](...fnArgs, (err, res) => {
assert.ifError(err);
assert.deepStrictEqual(res, {
message: { body: {
test1: 'aaa',
test2: 'bbb',
} },
});
done();
});
});
}));
describe('::checkPolicies', () => {
it('should return an error if any of the clients fails', done => {
const backend = new ChainBackend('chain', [
new TestBackend('test1', null, {
message: { body: [{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/obj1' }] },
}),
new TestBackend('test2', testError, null),
new TestBackend('test3', null, {
message: { body: [{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/obj1' }] },
}),
]);
backend.checkPolicies(null, null, null, err => {
assert.deepStrictEqual(err, testError);
done();
});
});
it('should merge results from clients into a single response object', done => {
const backend = new ChainBackend('chain', [
new TestBackend('test1', null, {
message: { body: [{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/obj1' }] },
}),
new TestBackend('test2', null, {
message: { body: [{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/obj2' }] },
}),
new TestBackend('test3', null, {
message: { body: [{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/obj1' }] },
}),
]);
backend.checkPolicies(null, null, null, (err, res) => {
assert.ifError(err);
assert.deepStrictEqual(res, {
message: { body: [
{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/obj1' },
{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/obj2' },
] },
});
done();
});
});
});
describe('::_mergeObject', () => {
it('should correctly merge responses', () => {
const objectResps = [
{ message: { body: {
id1: 'email1@test.com',
wrongformatcanid: 'WrongFormat',
id4: 'email4@test.com',
} } },
{ message: { body: {
id2: 'NotFound',
id3: 'email3@test.com',
id4: 'email5@test.com',
} } },
];
assert.deepStrictEqual(
ChainBackend._mergeObjects(objectResps),
{
id1: 'email1@test.com',
wrongformatcanid: 'WrongFormat',
id2: 'NotFound',
id3: 'email3@test.com',
// id4 should be overwritten
id4: 'email5@test.com',
},
);
});
});
describe('::_mergePolicies', () => {
it('should correctly merge policies', () => {
const policyResps = [
{ message: { body: [
{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/true1' },
{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/true2' },
{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/false1' },
] } },
{ message: { body: [
{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/true1' },
{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/true2' },
{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/false2' },
] } },
];
assert.deepStrictEqual(
ChainBackend._mergePolicies(policyResps),
[
{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/true1' },
{ isAllowed: true, arn: 'arn:aws:s3:::policybucket/true2' },
{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/false1' },
{ isAllowed: false, arn: 'arn:aws:s3:::policybucket/false2' },
],
);
});
});
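    // Hedged sketch of the allow-wins merge verified above: group entries
    // by ARN and OR the isAllowed flags across backends (illustrative
    // only, not Arsenal's implementation).
    function mergePoliciesSketch(policyResps) {
        const byArn = {};
        policyResps.forEach(resp => resp.message.body.forEach(policy => {
            const seen = byArn[policy.arn];
            byArn[policy.arn] = {
                isAllowed: (seen ? seen.isAllowed : false)
                    || policy.isAllowed,
                arn: policy.arn,
            };
        }));
        return Object.keys(byArn).map(arn => byArn[arn]);
    }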
describe('::healthcheck', () => {
it('should return error if a single client is unhealthy', done => {
const backend = new ChainBackend('chain', [
new TestBackend('test1', null, { code: 200 }),
new TestBackend('test2', testError, { code: 503 }),
new TestBackend('test3', null, { code: 200 }),
]);
backend.healthcheck(null, (err, res) => {
assert.deepStrictEqual(err, errors.InternalError);
assert.deepStrictEqual(res, [
{ error: null, status: { code: 200 } },
{ error: testError, status: { code: 503 } },
{ error: null, status: { code: 200 } },
]);
done();
});
});
it('should return result if all clients are healthy', done => {
const backend = new ChainBackend('chain', [
new TestBackend('test1', null, { msg: 'test1', code: 200 }),
new TestBackend('test2', null, { msg: 'test2', code: 200 }),
new TestBackend('test3', null, { msg: 'test3', code: 200 }),
]);
backend.healthcheck(null, (err, res) => {
assert.ifError(err);
assert.deepStrictEqual(res, [
{ error: null, status: { msg: 'test1', code: 200 } },
{ error: null, status: { msg: 'test2', code: 200 } },
{ error: null, status: { msg: 'test3', code: 200 } },
]);
done();
});
});
});
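    // Hedged sketch of the aggregation the healthcheck tests above assume:
    // every client's status is reported, and a single failing client turns
    // the overall result into InternalError (illustrative only).
    function healthcheckAggregateSketch(clientResults) {
        const anyError = clientResults.some(result => result.error !== null);
        return {
            error: anyError ? errors.InternalError : null,
            results: clientResults,
        };
    }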
});

View File

@ -1,6 +1,6 @@
const assert = require('assert');
-const Indexer = require('../../../../lib/auth/backends/in_memory/Indexer');
+const Indexer = require('../../../../lib/auth/in_memory/Indexer');
const ref = require('./sample_authdata.json');
const { should } = require('./AuthLoader.spec');

View File

@ -5,7 +5,7 @@ const assert = require('assert');
const constructStringToSign =
require('../../../../lib/auth/v2/constructStringToSign');
const hashSignature =
-require('../../../../lib/auth/backends/in_memory/vaultUtilities').hashSignature;
+require('../../../../lib/auth/in_memory/vaultUtilities').hashSignature;
const DummyRequestLogger = require('../../helpers').DummyRequestLogger;
const log = new DummyRequestLogger();

View File

@ -54,14 +54,6 @@ describe('should URIencode in accordance with AWS rules', () => {
assert.strictEqual(actualOutput, expectedOutput);
});
it('should encode codepoints that use surrogate pairs in UTF-16 as a ' +
'single UTF-8 sequence', () => {
const input = '/s3amazonaws.com/I-like-🌮s';
const expectedOutput = '%2Fs3amazonaws.com%2FI-like-%F0%9F%8C%AEs';
const actualOutput = awsURIencode(input);
assert.strictEqual(actualOutput, expectedOutput);
});
it('should skip invalid query params', () => {
const input = ['s3:ObjectCreated:*', 's3:ObjectRemoved:*',
's3:BucketCreated:*', 's3:BucketRemoved:*'];

View File

@ -269,33 +269,4 @@ describe('v4 headerAuthCheck', () => {
assert.strictEqual(res.params.version, 4);
done();
});
it('should not return error if proxy_path header is added', done => {
// Freezes time so date created within function will be Feb 8, 2016
const clock = fakeTimers.install({ now: 1454962445000 });
/* eslint-disable camelcase */
const alteredRequest = createAlteredRequest({
proxy_path: 'proxy/1234' }, 'headers', request, headers);
/* eslint-enable camelcase */
const res = headerAuthCheck(alteredRequest, log);
clock.uninstall();
assert.strictEqual(res.err, null);
done();
});
it('should return InvalidRequest error if proxy_path header is invalid',
done => {
// Freezes time so date created within function will be Feb 8, 2016
const clock = fakeTimers.install({ now: 1454962445000 });
/* eslint-disable camelcase */
const alteredRequest = createAlteredRequest({
proxy_path: 'absc%2proxy/1234' }, 'headers', request, headers);
/* eslint-enable camelcase */
const res = headerAuthCheck(alteredRequest, log);
clock.uninstall();
assert.deepStrictEqual(res.err,
errors.InvalidArgument.customizeDescription(
'invalid proxy_path header'));
done();
});
});

View File

@ -225,34 +225,4 @@ describe('v4 queryAuthCheck', () => {
assert.strictEqual(res.params.version, 4);
done();
});
it('should successfully return no error if proxy_path header is added',
done => {
// Freezes time so date created within function will be Feb 8, 2016
const clock = fakeTimers.install({ now: 1454974984001 });
/* eslint-disable camelcase */
const alteredRequest = createAlteredRequest({ proxy_path:
'proxy/1234' }, 'headers', request, query);
/* eslint-enable camelcase */
const res = queryAuthCheck(alteredRequest, log, alteredRequest.query);
clock.uninstall();
assert.deepStrictEqual(res.err, null);
done();
});
it('should return InvalidRequest error if proxy_path header is invalid',
done => {
// Freezes time so date created within function will be Feb 8, 2016
const clock = fakeTimers.install({ now: 1454974984001 });
/* eslint-disable camelcase */
const alteredRequest = createAlteredRequest({ proxy_path:
'absc%2proxy/1234' }, 'headers', request, query);
/* eslint-enable camelcase */
const res = queryAuthCheck(alteredRequest, log, alteredRequest.query);
clock.uninstall();
assert.deepStrictEqual(res.err,
errors.InvalidArgument.customizeDescription(
'invalid proxy_path header'));
done();
});
});

View File

@ -3,7 +3,7 @@
const assert = require('assert');
const calculateSigningKey =
-require('../../../../lib/auth/backends/in_memory/vaultUtilities')
+require('../../../../lib/auth/in_memory/vaultUtilities')
.calculateSigningKey;
describe('v4 signing key calculation', () => {

View File

@ -1,100 +0,0 @@
const assert = require('assert');
const { Readable } = require('stream');
const V4Transform =
require('../../../../../lib/auth/v4/streamingV4/V4Transform');
const Backend = require('../../../../../lib/auth/backends/in_memory/Backend').s3;
const Vault = require('../../../../../lib/auth/Vault');
const { DummyRequestLogger } = require('../../../helpers');
const log = new DummyRequestLogger();
const streamingV4Params = {
accessKey: 'accessKey1',
signatureFromRequest: '2b8637632a997e06ee7b6c85d7' +
'147d2025e8f04d4374f4d7d7320de1618c7509',
region: 'us-east-1',
scopeDate: '20170516',
timestamp: '20170516T204738Z',
credentialScope: '20170516/us-east-1/s3/aws4_request',
};
const dummyAuthData = {
accounts:
[{ name: 'Bart',
email: 'sampleaccount1@sampling.com',
arn: 'arn:aws:iam::123456789012:root',
canonicalID:
'79a59df900b949e55d96a1e698fbacedf' +
'd6e09d98eacf8f8d5218e7cd47ef2be',
shortid: '123456789012',
keys: [{ access: 'accessKey1', secret: 'verySecretKey1' }] },
{ name: 'Lisa',
email: 'sampleaccount2@sampling.com',
arn: 'arn:aws:iam::123456789013:root',
canonicalID:
'79a59df900b949e55d96a1e698fbacedf' +
'd6e09d98eacf8f8d5218e7cd47ef2bf',
shortid: '123456789013',
keys: [{ access: 'accessKey2', secret: 'verySecretKey2' }] },
],
};
const vault = new Vault(new Backend(dummyAuthData), 'vaultMem');
class AuthMe extends Readable {
constructor(chunks) {
super();
this._parts = chunks;
this._index = 0;
}
_read() {
this.push(this._parts[this._index]);
this._index++;
}
}
describe('V4Transform class', () => {
it('should authenticate successfully', done => {
const v4Transform = new V4Transform(streamingV4Params,
vault, log, err => {
assert.strictEqual(err, null);
});
const filler1 = '8;chunk-signature=51d2511f7c6887907dff20474d8db6' +
'7d557e5f515a6fa6a8466bb12f8833bcca\r\ncontents\r\n';
const filler2 = '0;chunk-signature=c0eac24b7ce72141ec077df9753db' +
'4cc8b7991491806689da0395c8bd0231e48\r\n';
const chunks = [
Buffer.from(filler1),
Buffer.from(filler2),
null,
];
const authMe = new AuthMe(chunks);
authMe.pipe(v4Transform);
v4Transform.on('finish', () => {
done();
});
});
it('should ignore data sent after final chunk', done => {
const v4Transform = new V4Transform(streamingV4Params,
vault, log, err => {
assert.strictEqual(err, null);
done();
});
const filler1 = '8;chunk-signature=51d2511f7c6887907dff20474d8db6' +
'7d557e5f515a6fa6a8466bb12f8833bcca\r\ncontents\r\n';
const filler2 = '0;chunk-signature=c0eac24b7ce72141ec077df9753db' +
'4cc8b7991491806689da0395c8bd0231e48\r\n';
const filler3 = '\r\n';
const chunks = [
Buffer.from(filler1),
Buffer.from(filler2),
Buffer.from(filler3),
null,
];
const authMe = new AuthMe(chunks);
authMe.pipe(v4Transform);
v4Transform.on('finish', () => {
done();
});
});
});
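The removed V4Transform spec above feeds the transform aws-chunked payloads:
each chunk carries its own signature, and a zero-length chunk ends the
stream. Schematically (a format sketch for reference, not part of the diff):

```
<hex chunk size>;chunk-signature=<64-hex-char signature>\r\n
<chunk payload bytes>\r\n
0;chunk-signature=<final signature>\r\n
```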

View File

@ -57,6 +57,7 @@ function zpad(key, length = 15) {
}
class DummyRequestLogger {
constructor() {
this.ops = [];
this.counts = {
@ -109,11 +110,5 @@ class DummyRequestLogger {
}
}
-module.exports = {
-    makeid,
-    timeDiff,
-    makeAuthInfo,
-    createAlteredRequest,
-    zpad,
-    DummyRequestLogger,
-};
+module.exports = { makeid, timeDiff, makeAuthInfo,
+    createAlteredRequest, zpad, DummyRequestLogger };

View File

@ -69,7 +69,7 @@ describe('Check IP matches a list of CIDR ranges', () => {
[['192.168.1.1'], '192.168.1.1'],
].forEach(item =>
it(`should match IP ${item[0][0]} without CIDR range`,
-() => cidrListMatchCheck(item[0], item[1], true)),
+() => cidrListMatchCheck(item[0], item[1], true))
);
it('should not range match if CIDR range is not provided',

View File

@ -1,157 +0,0 @@
const assert = require('assert');
const BackendInfo = require('../../../lib/models/BackendInfo');
const { DummyRequestLogger } = require('../helpers');
const DummyConfig = require('../../utils/DummyConfig');
const log = new DummyRequestLogger();
const data = 'mem';
const memLocation = 'scality-internal-mem';
const fileLocation = 'scality-internal-file';
const legacyLocation = 'legacy';
const dummyConfig = new DummyConfig();
const dummyBackendInfo = new BackendInfo(dummyConfig, memLocation,
fileLocation, '127.0.0.1');
const dummyLegacyConfig = new DummyConfig(true);
describe('BackendInfo class', () => {
describe('controllingBackendParam', () => {
beforeEach(() => {
dummyConfig.backends.data = data;
dummyLegacyConfig.backends.data = data;
});
it('should return object with applicable error if ' +
'objectLocationConstraint is invalid', () => {
const res = BackendInfo.controllingBackendParam(dummyConfig,
'notValid', fileLocation, '127.0.0.1', log);
assert.equal(res.isValid, false);
assert((res.description).indexOf('Object Location Error')
> -1);
});
it('should return object with applicable error if ' +
'bucketLocationConstraint is invalid and no ' +
'objectLocationConstraint was provided', () => {
const res = BackendInfo.controllingBackendParam(dummyConfig,
undefined, 'notValid', '127.0.0.1', log);
assert.equal(res.isValid, false);
assert((res.description).indexOf('Bucket ' +
'Location Error') > -1);
});
it('If requestEndpoint is invalid, no objectLocationConstraint or ' +
'bucketLocationConstraint was provided, data backend is set to ' +
'"scality" should return "object with applicable error"', () => {
dummyConfig.backends.data = 'scality';
const res = BackendInfo.controllingBackendParam(dummyConfig,
undefined, undefined, 'notValid', log);
assert.equal(res.isValid, false);
assert((res.description).indexOf('Endpoint Location Error') > -1);
});
it('If requestEndpoint is invalid, no objectLocationConstraint or ' +
'bucketLocationConstraint was provided, data backend is set to ' +
'"scality" should return isValid if legacy location constraint', () => {
dummyLegacyConfig.backends.data = 'scality';
const res = BackendInfo.controllingBackendParam(dummyLegacyConfig,
undefined, undefined, 'notValid', log);
assert.equal(res.isValid, true);
});
it('If requestEndpoint is invalid, no objectLocationConstraint or ' +
'bucketLocationConstraint was provided and data backend is set to ' +
'"multiple" and legacy location constraint should return ' +
'"object with applicable error"', () => {
dummyConfig.backends.data = 'multiple';
const res = BackendInfo.controllingBackendParam(dummyConfig,
undefined, undefined, 'notValid', log);
assert.equal(res.isValid, false);
assert((res.description).indexOf('Endpoint Location Error') > -1);
});
it('If requestEndpoint is invalid, no objectLocationConstraint or ' +
'bucketLocationConstraint was provided and data backend is set to ' +
'"multiple" and legacy location constraint should return isValid if ' +
'legacy location constraint', () => {
dummyLegacyConfig.backends.data = 'multiple';
const res = BackendInfo.controllingBackendParam(dummyLegacyConfig,
undefined, undefined, 'notValid', log);
assert.equal(res.isValid, true);
});
it('should return isValid if requestEndpoint is invalid and ' +
'data backend is set to "file"', () => {
dummyConfig.backends.data = 'file';
const res = BackendInfo.controllingBackendParam(dummyConfig,
memLocation, fileLocation, 'notValid', log);
assert.equal(res.isValid, true);
});
it('should return isValid if requestEndpoint is invalid and ' +
'data backend is set to "mem"', () => {
dummyConfig.backends.data = 'mem';
const res = BackendInfo.controllingBackendParam(dummyConfig,
memLocation, fileLocation, 'notValid', log);
assert.equal(res.isValid, true);
});
it('should return isValid if requestEndpoint is invalid but ' +
'valid objectLocationConstraint was provided', () => {
dummyConfig.backends.data = 'multiple';
const res = BackendInfo.controllingBackendParam(dummyConfig,
memLocation, undefined, 'notValid', log);
assert.equal(res.isValid, true);
});
it('should return isValid if requestEndpoint is invalid but ' +
'valid bucketLocationConstraint was provided', () => {
dummyConfig.backends.data = 'multiple';
const res = BackendInfo.controllingBackendParam(dummyConfig,
undefined, memLocation, 'notValid', log);
assert.equal(res.isValid, true);
});
it('should return isValid if all backend ' +
'parameters are valid', () => {
const res = BackendInfo.controllingBackendParam(dummyConfig,
memLocation, fileLocation, '127.0.0.1', log);
assert.equal(res.isValid, true);
});
});
describe('getControllingLocationConstraint', () => {
it('should return object location constraint', () => {
const controllingLC =
dummyBackendInfo.getControllingLocationConstraint();
assert.strictEqual(controllingLC, memLocation);
});
});
describe('legacy for getControllingLocationConstraint', () => {
const dummyBackendInfoLegacy = new BackendInfo(dummyLegacyConfig, null,
null, '127.0.0.1', legacyLocation);
it('should return legacy location constraint', () => {
const controllingLC =
dummyBackendInfoLegacy.getControllingLocationConstraint();
assert.strictEqual(controllingLC, legacyLocation);
});
});
describe('getters', () => {
it('should return object location constraint', () => {
const objectLC =
dummyBackendInfo.getObjectLocationConstraint();
assert.strictEqual(objectLC, memLocation);
});
it('should return bucket location constraint', () => {
const bucketLC =
dummyBackendInfo.getBucketLocationConstraint();
assert.strictEqual(bucketLC, fileLocation);
});
it('should return request endpoint', () => {
const reqEndpoint =
dummyBackendInfo.getRequestEndpoint();
assert.strictEqual(reqEndpoint, '127.0.0.1');
});
});
});
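The removed BackendInfo tests above encode a precedence when picking the
controlling location constraint: an object-level constraint wins over a
bucket-level one, which wins over the endpoint/config default. A minimal
sketch of that precedence (illustrative, with a hypothetical helper name,
not the removed implementation):

```javascript
// Illustrative only: precedence implied by the removed tests above.
function controllingLocationSketch(objectLC, bucketLC, endpointDefault) {
    return objectLC || bucketLC || endpointDefault;
}
```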

View File

@ -1,170 +0,0 @@
const assert = require('assert');
const BucketAzureInfo = require('../../../index').models.BucketAzureInfo;
const testAzureInfoObj = {
sku: 'skuname',
accessTier: 'accessTierName',
kind: 'kindName',
systemKeys: ['key1', 'key2'],
tenantKeys: ['key3', 'key4'],
subscriptionId: 'subscriptionIdName',
resourceGroup: 'resourceGroupName',
deleteRetentionPolicy: { enabled: true, days: 14 },
managementPolicies: [],
httpsOnly: false,
tags: { foo: 'bar' },
networkACL: [],
cname: 'www.example.com',
azureFilesAADIntegration: false,
hnsEnabled: false,
logging: {},
hourMetrics: {},
minuteMetrics: {},
serviceVersion: '2018-03-28',
};
const azureInfo = new BucketAzureInfo(testAzureInfoObj);
describe('BucketAzureInfo value', () => {
it('should return the correct value', () => {
const azureInfoObj = azureInfo.getValue();
assert.deepStrictEqual(azureInfoObj, testAzureInfoObj);
});
});
describe('BucketAzureInfo setters/getters', () => {
it('should control the sku attribute', () => {
const sku = 'new sku value';
azureInfo.setSku(sku);
assert.deepStrictEqual(azureInfo.getSku(), sku);
});
it('should control the accessTier attribute', () => {
const accessTier = 'new accessTier value';
azureInfo.setAccessTier(accessTier);
assert.deepStrictEqual(azureInfo.getAccessTier(), accessTier);
});
it('should control the kind attribute', () => {
const kind = 'new kind value';
azureInfo.setKind(kind);
assert.deepStrictEqual(azureInfo.getKind(), kind);
});
it('should control the systemKeys attribute', () => {
const systemKeys = ['newKey1', 'newKey2'];
azureInfo.setSystemKeys(systemKeys);
assert.deepStrictEqual(azureInfo.getSystemKeys(),
systemKeys);
});
it('should control the tenantKeys attribute', () => {
const tenantKeys = ['newKey3', 'newKey4'];
azureInfo.setTenantKeys(tenantKeys);
assert.deepStrictEqual(azureInfo.getTenantKeys(),
tenantKeys);
});
it('should control the subscriptionId attribute', () => {
const subscriptionId = 'new subscription value';
azureInfo.setSubscriptionId(subscriptionId);
assert.deepStrictEqual(azureInfo.getSubscriptionId(),
subscriptionId);
});
it('should control the resourceGroup attribute', () => {
const resourceGroup = 'new resource group value';
azureInfo.setResourceGroup(resourceGroup);
assert.deepStrictEqual(azureInfo.getResourceGroup(),
resourceGroup);
});
it('should control the deleteRetentionPolicy attribute', () => {
const deleteRetentionPolicy = { enabled: false };
azureInfo.setDeleteRetentionPolicy(deleteRetentionPolicy);
assert.deepStrictEqual(azureInfo.getDeleteRetentionPolicy(),
deleteRetentionPolicy);
});
it('should control the managementPolicies attribute', () => {
const managementPolicies = [{}];
azureInfo.setManagementPolicies(managementPolicies);
assert.deepStrictEqual(azureInfo.getManagementPolicies(),
managementPolicies);
});
it('should control the httpsOnly attribute', () => {
const httpsOnly = true;
azureInfo.setHttpsOnly(httpsOnly);
assert.deepStrictEqual(azureInfo.getHttpsOnly(),
httpsOnly);
});
it('should control the tags attribute', () => {
const tags = { baz: 'baz' };
azureInfo.setTags(tags);
assert.deepStrictEqual(azureInfo.getTags(),
tags);
});
it('should control the networkACL attribute', () => {
const networkACL = [{}];
azureInfo.setNetworkACL(networkACL);
assert.deepStrictEqual(azureInfo.getNetworkACL(),
networkACL);
});
it('should control the cname attribute', () => {
const cname = 'new cname value';
azureInfo.setCname(cname);
assert.deepStrictEqual(azureInfo.getCname(),
cname);
});
it('should control the azureFilesAADIntegration attribute', () => {
const azureFilesAADIntegration = true;
azureInfo.setAzureFilesAADIntegration(azureFilesAADIntegration);
assert.deepStrictEqual(azureInfo.getAzureFilesAADIntegration(),
azureFilesAADIntegration);
});
it('should control the hnsEnabled attribute', () => {
const hnsEnabled = true;
azureInfo.setHnsEnabled(hnsEnabled);
assert.deepStrictEqual(azureInfo.getHnsEnabled(),
hnsEnabled);
});
it('should control the logging attribute', () => {
const logging = {
version: '1.0',
delete: false,
read: false,
write: false,
retentionPolicy: {
enabled: false,
days: 0,
},
};
azureInfo.setLogging(logging);
assert.deepStrictEqual(azureInfo.getLogging(), logging);
});
it('should control the hourMetrics attribute', () => {
const hourMetrics = {
version: '1.0',
enabled: false,
includeAPIs: false,
retentionPolicy: {
enabled: false,
days: 0,
},
};
azureInfo.setHourMetrics(hourMetrics);
assert.deepStrictEqual(azureInfo.getHourMetrics(), hourMetrics);
});
it('should control the minuteMetrics attribute', () => {
const minuteMetrics = {
version: '1.0',
enabled: false,
includeAPIs: false,
retentionPolicy: {
enabled: false,
days: 0,
},
};
azureInfo.setMinuteMetrics(minuteMetrics);
assert.deepStrictEqual(azureInfo.getMinuteMetrics(), minuteMetrics);
});
it('should control the serviceVersion attribute', () => {
const serviceVersion = '2019-08-01';
azureInfo.setServiceVersion(serviceVersion);
assert.deepStrictEqual(azureInfo.getServiceVersion(), serviceVersion);
});
});

View File

@ -59,8 +59,6 @@ const testWebsiteConfiguration = new WebsiteConfiguration({
});
const testLocationConstraint = 'us-west-1';
const testReadLocationConstraint = 'us-west-2';
const testLocationConstraintIngest = 'us-west-3:ingest';
const testCorsConfiguration = [
{ id: 'test',
@ -118,25 +116,7 @@ const testLifecycleConfiguration = {
],
};
const testIngestionConfiguration = { status: 'enabled' };
const testUid = '99ae3446-7082-4c17-ac97-52965dc004ec';
const testAzureInfo = {
sku: 'skuname',
accessTier: 'accessTierName',
kind: 'kindName',
systemKeys: ['key1', 'key2'],
tenantKeys: ['key1', 'key2'],
subscriptionId: 'subscriptionIdName',
resourceGroup: 'resourceGroupName',
deleteRetentionPolicy: { enabled: true, days: 14 },
managementPolicies: [],
httpsOnly: false,
tags: { foo: 'bar' },
networkACL: [],
cname: 'www.example.com',
azureFilesAADIntegration: false,
hnsEnabled: false,
};
const testBucketPolicy = {
Version: '2012-10-17',
@ -201,8 +181,8 @@ Object.keys(acl).forEach(
testCorsConfiguration,
testReplicationConfiguration,
testLifecycleConfiguration,
-testBucketPolicy, testUid, undefined,
-true, undefined, testAzureInfo,
+testBucketPolicy,
+testUid,
testobjectLockEnabled,
testObjectLockConfiguration,
testNotificationConfiguration);
@ -224,7 +204,6 @@ Object.keys(acl).forEach(
versioningConfiguration:
dummyBucket._versioningConfiguration,
locationConstraint: dummyBucket._locationConstraint,
readLocationConstraint: dummyBucket._readLocationConstraint,
websiteConfiguration: dummyBucket._websiteConfiguration
.getConfig(),
cors: dummyBucket._cors,
@ -234,9 +213,6 @@ Object.keys(acl).forEach(
dummyBucket._lifecycleConfiguration,
bucketPolicy: dummyBucket._bucketPolicy,
uid: dummyBucket._uid,
isNFS: dummyBucket._isNFS,
ingestion: dummyBucket._ingestion,
azureInfo: dummyBucket._azureInfo,
objectLockEnabled: dummyBucket._objectLockEnabled,
objectLockConfiguration:
dummyBucket._objectLockConfiguration,
@ -256,57 +232,7 @@ Object.keys(acl).forEach(
});
});
describe('fromObj on BucketInfo class', () => {
it('should create BucketInfo instance from fromObj', done => {
const dataObj = {
_acl: dummyBucket._acl,
_name: dummyBucket._name,
_owner: dummyBucket._owner,
_ownerDisplayName: dummyBucket._ownerDisplayName,
_creationDate: dummyBucket._creationDate,
_mdBucketModelVersion: dummyBucket._mdBucketModelVersion,
_transient: dummyBucket._transient,
_deleted: dummyBucket._deleted,
_serverSideEncryption: dummyBucket._serverSideEncryption,
_versioningConfiguration:
dummyBucket._versioningConfiguration,
_locationConstraint: dummyBucket._locationConstraint,
_readLocationConstraint: dummyBucket._readLocationConstraint,
_websiteConfiguration: testWebsiteConfiguration,
_cors: dummyBucket._cors,
_replicationConfiguration:
dummyBucket._replicationConfiguration,
_lifecycleConfiguration:
dummyBucket._lifecycleConfiguration,
_bucketPolicy: dummyBucket._bucketPolicy,
_uid: dummyBucket._uid,
_isNFS: dummyBucket._isNFS,
_ingestion: dummyBucket._ingestion,
_azureInfo: dummyBucket._azureInfo,
_objectLockEnabled: dummyBucket._objectLockEnabled,
_objectLockConfiguration:
dummyBucket._objectLockConfiguration,
_notificationConfiguration:
dummyBucket._notificationConfiguration,
};
const fromObj = BucketInfo.fromObj(dataObj);
assert(fromObj instanceof BucketInfo);
assert.deepStrictEqual(fromObj, dummyBucket);
done();
});
});
describe('constructor', () => {
it('this should have the right BucketInfo types',
() => {
assert.strictEqual(typeof dummyBucket.getName(), 'string');
assert.strictEqual(typeof dummyBucket.getOwner(), 'string');
assert.strictEqual(typeof dummyBucket.getOwnerDisplayName(),
'string');
assert.strictEqual(typeof dummyBucket.getCreationDate(),
'string');
assert.strictEqual(typeof dummyBucket.getUid(), 'string');
});
it('this should have the right BucketInfo types', () => {
assert.strictEqual(typeof dummyBucket.getName(), 'string');
assert.strictEqual(typeof dummyBucket.getOwner(), 'string');
@ -383,18 +309,6 @@ Object.keys(acl).forEach(
assert.deepStrictEqual(dummyBucket.getLocationConstraint(),
testLocationConstraint);
});
it('getReadLocationConstraint should return locationConstraint ' +
'if readLocationConstraint hasn\'t been set', () => {
assert.deepStrictEqual(dummyBucket.getReadLocationConstraint(),
testLocationConstraint);
});
it('getReadLocationConstraint should return readLocationConstraint',
() => {
dummyBucket._readLocationConstraint =
testReadLocationConstraint;
assert.deepStrictEqual(dummyBucket.getReadLocationConstraint(),
testReadLocationConstraint);
});
it('getCors should return CORS configuration', () => {
assert.deepStrictEqual(dummyBucket.getCors(),
testCorsConfiguration);
@ -410,17 +324,6 @@ Object.keys(acl).forEach(
it('getUid should return unique id of bucket', () => {
assert.deepStrictEqual(dummyBucket.getUid(), testUid);
});
it('isNFS should return whether bucket is on NFS', () => {
assert.deepStrictEqual(dummyBucket.isNFS(), true);
});
it('setIsNFS should set whether bucket is on NFS', () => {
dummyBucket.setIsNFS(false);
assert.deepStrictEqual(dummyBucket.isNFS(), false);
});
it('getAzureInfo should return the expected structure', () => {
const azureInfo = dummyBucket.getAzureInfo();
assert.deepStrictEqual(azureInfo, testAzureInfo);
});
it('object lock should be disabled by default', () => {
assert.deepStrictEqual(
dummyBucket.isObjectLockEnabled(), false);
@ -504,7 +407,8 @@ Object.keys(acl).forEach(
protocol: 'https',
},
};
-dummyBucket.setWebsiteConfiguration(newWebsiteConfiguration);
+dummyBucket
+    .setWebsiteConfiguration(newWebsiteConfiguration);
assert.deepStrictEqual(dummyBucket.getWebsiteConfiguration(),
newWebsiteConfiguration);
});
@ -569,22 +473,6 @@ Object.keys(acl).forEach(
assert.deepStrictEqual(
dummyBucket.getBucketPolicy(), newBucketPolicy);
});
it('enableIngestion should set ingestion status to enabled', () => {
dummyBucket.enableIngestion();
assert.deepStrictEqual(dummyBucket.getIngestion(),
{ status: 'enabled' });
});
it('disableIngestion should set ingestion status to null', () => {
dummyBucket.disableIngestion();
assert.deepStrictEqual(dummyBucket.getIngestion(),
{ status: 'disabled' });
});
it('setAzureInfo should work', () => {
const dummyAzureInfo = {};
dummyBucket.setAzureInfo(dummyAzureInfo);
const azureInfo = dummyBucket.getAzureInfo();
assert.deepStrictEqual(azureInfo, dummyAzureInfo);
});
it('setObjectLockConfiguration should set object lock ' +
'configuration', () => {
const newObjectLockConfig = {
@ -631,77 +519,5 @@ Object.keys(acl).forEach(
dummyBucket.getUid(), testUid);
});
});
-}),
+})
);
describe('uid default', () => {
it('should set uid if none is specified by constructor params', () => {
const dummyBucket = new BucketInfo(
bucketName, owner, ownerDisplayName, testDate,
BucketInfo.currentModelVersion(), acl[emptyAcl],
false, false, {
cryptoScheme: 1,
algorithm: 'sha1',
masterKeyId: 'somekey',
mandatory: true,
}, testVersioningConfiguration,
testLocationConstraint,
testWebsiteConfiguration,
testCorsConfiguration,
testReplicationConfiguration,
testLifecycleConfiguration);
const defaultUid = dummyBucket.getUid();
assert(defaultUid);
assert.strictEqual(defaultUid.length, 36);
});
});
describe('ingest', () => {
it('should enable ingestion if ingestion param sent on bucket creation',
() => {
const dummyBucket = new BucketInfo(
bucketName, owner, ownerDisplayName, testDate,
BucketInfo.currentModelVersion(), acl[emptyAcl],
false, false, {
cryptoScheme: 1,
algorithm: 'sha1',
masterKeyId: 'somekey',
mandatory: true,
}, testVersioningConfiguration,
testLocationConstraintIngest,
testWebsiteConfiguration,
testCorsConfiguration,
testReplicationConfiguration,
testLifecycleConfiguration,
testBucketPolicy,
testUid, undefined, true, testIngestionConfiguration);
assert.deepStrictEqual(dummyBucket.getIngestion(),
{ status: 'enabled' });
assert.strictEqual(dummyBucket.isIngestionBucket(), true);
assert.strictEqual(dummyBucket.isIngestionEnabled(), true);
});
it('should have ingestion as null if no ingestion param was sent on' +
'bucket creation', () => {
const dummyBucket = new BucketInfo(
bucketName, owner, ownerDisplayName, testDate,
BucketInfo.currentModelVersion(), acl[emptyAcl],
false, false, {
cryptoScheme: 1,
algorithm: 'sha1',
masterKeyId: 'somekey',
mandatory: true,
}, testVersioningConfiguration,
testLocationConstraintIngest,
testWebsiteConfiguration,
testCorsConfiguration,
testReplicationConfiguration,
testLifecycleConfiguration,
testBucketPolicy,
testUid, undefined, true);
assert.deepStrictEqual(dummyBucket.getIngestion(), null);
assert.strictEqual(dummyBucket.isIngestionBucket(), false);
assert.strictEqual(dummyBucket.isIngestionEnabled(), false);
});
});

View File

@ -1,6 +1,5 @@
const assert = require('assert');
const { parseString } = require('xml2js');
const errors = require('../../../lib/errors');
const LifecycleConfiguration =
require('../../../lib/models/LifecycleConfiguration.js');
@ -11,18 +10,6 @@ const days = {
Expiration: 'Days',
};
const mockConfig = {
replicationEndpoints: [
{
site: 'a',
},
{
site: 'b',
},
],
};
const MAX_DAYS = 2147483647; // Max 32-bit signed binary integer.
const date = new Date();
date.setUTCHours(0, 0, 0, 0);
@ -71,6 +58,12 @@ const requiredTags = [
{ tag: 'Action', error: 'InvalidRequest',
errMessage: 'Rule does not include valid action' }];
const notImplementedActions = [
{ tag: 'Transition',
errMessage: 'Transition lifecycle action not yet implemented' },
{ tag: 'NoncurrentVersionTransition',
errMessage: 'Transition lifecycle action not yet implemented' }];
const invalidActions = [
{ tag: 'AbortIncompleteMultipartUpload', label: 'no-time',
error: 'MalformedXML',
@ -281,8 +274,8 @@ function generateParsedXml(errorTag, tagObj, cb) {
}
function checkError(parsedXml, error, errMessage, cb) {
-const lcConfig = new LifecycleConfiguration(parsedXml, mockConfig)
-    .getLifecycleConfiguration();
+const lcConfig = new LifecycleConfiguration(parsedXml).
+    getLifecycleConfiguration();
assert.strictEqual(lcConfig.error[error], true);
assert.strictEqual(lcConfig.error.description, errMessage);
cb();
@ -308,6 +301,16 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
});
});
notImplementedActions.forEach(action => {
const expError = 'NotImplemented';
it(`should return ${expError} error for ${action.tag} action`,
done => {
generateParsedXml('Action', action, parsedXml => {
checkError(parsedXml, expError, action.errMessage, done);
});
});
});
invalidActions.forEach(a => {
it(`should return ${a.error} for ${a.label} action error`,
done => {
@ -358,14 +361,14 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
});
});
-it('should apply all unique Key tags if multiple tags included', done => {
-    tagObj.label = 'mult-tags';
+it('should use last listed Prefix if multiple Prefixes included', done => {
+    tagObj.label = 'mult-prefixes';
+    tagObj.lastPrefix = 'coco';
generateParsedXml('Filter', tagObj, parsedXml => {
-    const lcConfig = new LifecycleConfiguration(parsedXml, mockConfig)
-        .getLifecycleConfiguration();
-    const expected = [{ key: 'color', val: 'blue' },
-        { key: 'shape', val: 'circle' }];
-    assert.deepStrictEqual(expected, lcConfig.rules[0].filter.tags);
+    const lcConfig = new LifecycleConfiguration(parsedXml).
+        getLifecycleConfiguration();
+    assert.strictEqual(tagObj.lastPrefix,
+        lcConfig.rules[0].filter.rulePrefix);
done();
});
});
@ -382,7 +385,7 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
tagObj.label = 'empty-prefix';
const expectedPrefix = '';
generateParsedXml('Filter', tagObj, parsedXml => {
-const lcConfig = new LifecycleConfiguration(parsedXml, mockConfig).
+const lcConfig = new LifecycleConfiguration(parsedXml).
getLifecycleConfiguration();
assert.strictEqual(expectedPrefix,
lcConfig.rules[0].filter.rulePrefix);
@ -391,472 +394,6 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
});
});
describe('LifecycleConfiguration', () => {
const lifecycleConfiguration = new LifecycleConfiguration({}, mockConfig);
function getParsedXML() {
return {
LifecycleConfiguration: {
Rule: [{
ID: ['test-id'],
Prefix: [''],
Status: ['Enabled'],
Expiration: [{
Days: 1,
}],
}],
},
};
}
describe('::_getRuleFilterDesc', () => {
it('should get Prefix', () => {
const rule = getParsedXML().LifecycleConfiguration.Rule[0];
const ruleFilter = lifecycleConfiguration._getRuleFilterDesc(rule);
assert.strictEqual(ruleFilter, "prefix ''");
});
it('should get Filter.Prefix', () => {
const rule = getParsedXML().LifecycleConfiguration.Rule[0];
delete rule.Prefix;
rule.Filter = [{ Prefix: [''] }];
const ruleFilter = lifecycleConfiguration._getRuleFilterDesc(rule);
assert.strictEqual(ruleFilter, "filter '(prefix=)'");
});
it('should get Filter.Tag', () => {
const rule = getParsedXML().LifecycleConfiguration.Rule[0];
delete rule.Prefix;
rule.Filter = [{ Tag: [{ Key: ['a'], Value: [''] }] }];
const ruleFilter = lifecycleConfiguration._getRuleFilterDesc(rule);
assert.strictEqual(ruleFilter, "filter '(tag: key=a, value=)'");
});
it('should get Filter.And', () => {
const rule = getParsedXML().LifecycleConfiguration.Rule[0];
delete rule.Prefix;
rule.Filter = [{
And: [{
Prefix: [''],
Tag: [{
Key: ['a'],
Value: ['b'],
},
{
Key: ['c'],
Value: ['d'],
}],
}],
}];
const ruleFilter = lifecycleConfiguration._getRuleFilterDesc(rule);
assert.strictEqual(ruleFilter, 'filter ' +
"'(prefix= and tag: key=a, value=b and tag: key=c, value=d)'");
});
it('should get Filter.And without Prefix', () => {
const rule = getParsedXML().LifecycleConfiguration.Rule[0];
delete rule.Prefix;
rule.Filter = [{
And: [{
Tag: [{
Key: ['a'],
Value: ['b'],
},
{
Key: ['c'],
Value: ['d'],
}],
}],
}];
const ruleFilter = lifecycleConfiguration._getRuleFilterDesc(rule);
assert.strictEqual(ruleFilter,
"filter '(tag: key=a, value=b and tag: key=c, value=d)'");
});
it('should get Filter with empty object', () => {
const rule = {
ID: ['test-id'],
Status: ['Enabled'],
Expiration: [{
Days: 1,
}],
};
rule.Filter = [{}];
const ruleFilter = lifecycleConfiguration._getRuleFilterDesc(rule);
assert.strictEqual(ruleFilter, 'filter (all)');
});
it('should get empty Filter', () => {
const rule = {
ID: ['test-id'],
Status: ['Enabled'],
Expiration: [{
Days: 1,
}],
};
rule.Filter = [];
const ruleFilter = lifecycleConfiguration._getRuleFilterDesc(rule);
assert.strictEqual(ruleFilter, 'filter (all)');
});
});
describe('::_checkDays', () => {
it(`should return no error when days value is 0 - ${MAX_DAYS}`, () => {
const error = lifecycleConfiguration._checkDays({
days: 0,
});
assert.strictEqual(error, null);
});
it('should return error when exceeding max value', () => {
const error = lifecycleConfiguration._checkDays({
days: MAX_DAYS + 1,
field: 'a',
ancestor: 'b',
});
const msg = "'a' in b action must not exceed 2147483647";
const expected = errors.MalformedXML.customizeDescription(msg);
assert.deepStrictEqual(error, expected);
});
it('should return error when negative value', () => {
const error = lifecycleConfiguration._checkDays({
days: -1,
field: 'a',
ancestor: 'b',
});
const msg = "'a' in b action must be nonnegative";
const expected = errors.InvalidArgument.customizeDescription(msg);
assert.deepStrictEqual(error, expected);
});
});
describe('::_checkStorageClasses', () => {
it('should return no error when StorageClass is first one used', () => {
const error = lifecycleConfiguration._checkStorageClasses({
usedStorageClasses: [],
storageClass: 'a',
});
assert.strictEqual(error, null);
});
it('should return no error when StorageClass has not been used', () => {
const error = lifecycleConfiguration._checkStorageClasses({
usedStorageClasses: ['a'],
storageClass: 'b',
});
assert.strictEqual(error, null);
});
it('should return error when unknown StorageClass is given', () => {
const error = lifecycleConfiguration._checkStorageClasses({
storageClass: 'c',
});
const msg = "'StorageClass' must be one of 'a', 'b'";
const expected = errors.MalformedXML.customizeDescription(msg);
assert.deepStrictEqual(error, expected);
});
it('should return error when StorageClass has been used', () => {
const error = lifecycleConfiguration._checkStorageClasses({
usedStorageClasses: ['a'],
storageClass: 'a',
field: 'a',
ancestor: 'b',
rule: getParsedXML().LifecycleConfiguration.Rule[0],
});
const msg = "'StorageClass' must be different for 'b' actions " +
"in same 'Rule' with prefix ''";
const expected = errors.InvalidRequest.customizeDescription(msg);
assert.deepStrictEqual(error, expected);
});
});
describe('::_checkTimeType', () => {
it('should return no error when first time type in rule', () => {
const error = lifecycleConfiguration._checkTimeType({
usedTimeType: null,
currentTimeType: 'Date',
rule: {},
});
assert.strictEqual(error, null);
});
it('should return no error when time type is same as others', () => {
const error = lifecycleConfiguration._checkTimeType({
usedTimeType: 'Date',
currentTimeType: 'Date',
rule: {},
});
assert.strictEqual(error, null);
});
it('should return error when time type differs from others', () => {
const error = lifecycleConfiguration._checkTimeType({
usedTimeType: 'Date',
currentTimeType: 'Days',
rule: getParsedXML().LifecycleConfiguration.Rule[0],
});
const msg = "Found mixed 'Date' and 'Days' based Transition " +
"actions in lifecycle rule for prefix ''";
const expected = errors.InvalidRequest.customizeDescription(msg);
assert.deepStrictEqual(error, expected);
});
it('should return error when time type differs across expiration',
() => {
const error = lifecycleConfiguration._checkTimeType({
usedTimeType: 'Date',
currentTimeType: 'Date',
rule: getParsedXML().LifecycleConfiguration.Rule[0],
});
const msg = "Found mixed 'Date' and 'Days' based Expiration and " +
"Transition actions in lifecycle rule for prefix ''";
const expected = errors.InvalidRequest.customizeDescription(msg);
assert.deepStrictEqual(error, expected);
});
});
describe('::_checkDate', () => {
it('should return no error valid ISO date', () => {
const date = '2016-01-01T00:00:00.000Z';
const error = lifecycleConfiguration._checkDate(date);
assert.strictEqual(error, null);
});
it('should return error when invalid ISO date', () => {
const date = 'Fri, 01 Jan 2016 00:00:00 GMT';
const error = lifecycleConfiguration._checkDate(date);
const msg = 'Date must be in ISO 8601 format';
const expected = errors.InvalidArgument.customizeDescription(msg);
assert.deepStrictEqual(error, expected);
});
});
describe('::_parseNoncurrentVersionTransition', () => {
function getRule() {
return {
NoncurrentVersionTransition: [
{
NoncurrentDays: ['0'],
StorageClass: ['a'],
},
{
NoncurrentDays: ['1'],
StorageClass: ['b'],
},
],
};
}
it('should return correctly parsed result object', () => {
const rule = getRule();
const result =
lifecycleConfiguration._parseNoncurrentVersionTransition(rule);
assert.deepStrictEqual(result, {
nonCurrentVersionTransition: [
{
noncurrentDays: 0,
storageClass: 'a',
},
{
noncurrentDays: 1,
storageClass: 'b',
},
],
});
});
it('should return parsed result object with error', () => {
const rule = getRule();
rule.NoncurrentVersionTransition[0].NoncurrentDays[0] = '-1';
const result =
lifecycleConfiguration._parseNoncurrentVersionTransition(rule);
const msg = "'NoncurrentDays' in NoncurrentVersionTransition " +
'action must be nonnegative';
const error = errors.InvalidArgument.customizeDescription(msg);
assert.deepStrictEqual(result.error, error);
});
});
describe('::_parseTransition with Days', () => {
function getRule() {
return {
Transition: [
{
Days: ['0'],
StorageClass: ['a'],
},
{
Days: ['1'],
StorageClass: ['b'],
},
],
};
}
it('should return correctly parsed result object', () => {
const rule = getRule();
const result = lifecycleConfiguration._parseTransition(rule);
assert.deepStrictEqual(result, {
transition: [
{
days: 0,
storageClass: 'a',
},
{
days: 1,
storageClass: 'b',
},
],
});
});
it('should return parsed result object with error when days is ' +
'negative', () => {
const rule = getRule();
rule.Transition[0].Days[0] = '-1';
const result = lifecycleConfiguration._parseTransition(rule);
const msg = "'Days' in Transition action must be nonnegative";
const error = errors.InvalidArgument.customizeDescription(msg);
assert.deepStrictEqual(result.error, error);
});
it('should return parsed result object with error when two ' +
'transition days are the same', () => {
const rule = getRule();
rule.Prefix = ['prefix'];
rule.Transition[1].Days[0] = '0';
const result = lifecycleConfiguration._parseTransition(rule);
const msg = "'Days' in the 'Transition' action for StorageClass " +
"'a' for prefix 'prefix' must be at least one day apart from " +
"prefix 'prefix' in the 'Transition' action for StorageClass " +
"'b'";
const error = errors.InvalidArgument.customizeDescription(msg);
assert.deepStrictEqual(result.error, error);
});
});
describe('::_parseTransition with Date', () => {
it('should return parsed result object with error when dates are not ' +
'more than one day apart', () => {
const rule = {
Prefix: ['prefix'],
Transition: [
{
Date: ['2019-01-01T00:00:00.000Z'],
StorageClass: ['a'],
},
{
Date: ['2019-01-01T23:59:59.999Z'],
StorageClass: ['b'],
},
],
};
const result = lifecycleConfiguration._parseTransition(rule);
const msg = "'Date' in the 'Transition' action for StorageClass " +
"'a' for prefix 'prefix' must be at least one day apart from " +
"prefix 'prefix' in the 'Transition' action for StorageClass " +
"'b'";
const error = errors.InvalidArgument.customizeDescription(msg);
assert.deepStrictEqual(result.error, error);
});
});
describe('::_checkTimeGap', () => {
it('should not return error when only one transition', () => {
const params = {
rule: {
Transition: [{
Days: ['0'],
StorageClass: ['a'],
}],
},
days: 0,
storageClass: 'a',
};
const error = lifecycleConfiguration._checkTimeGap(params);
assert.strictEqual(error, undefined);
});
it('should not return error when transitions have days greater than ' +
'24 hours apart', () => {
const params = {
rule: {
Transition: [{
Days: ['0'],
StorageClass: ['a'],
}, {
Days: ['1'],
StorageClass: ['b'],
}],
},
days: 0,
storageClass: 'a',
};
const error = lifecycleConfiguration._checkTimeGap(params);
assert.strictEqual(error, undefined);
});
it('should return error when transitions have same day', () => {
const params = {
rule: {
Prefix: 'prefix',
Transition: [{
Days: ['0'],
StorageClass: ['a'],
}, {
Days: ['0'],
StorageClass: ['b'],
}],
},
days: 0,
storageClass: 'a',
};
const error = lifecycleConfiguration._checkTimeGap(params);
assert(error.InvalidArgument);
});
it('should not return error when transitions have dates greater than ' +
'24 hours apart', () => {
const params = {
rule: {
Transition: [{
Date: ['2019-01-01T00:00:00.000Z'],
StorageClass: ['a'],
}, {
Date: ['2019-01-02T00:00:00.000Z'],
StorageClass: ['b'],
}],
},
date: '2019-01-01T00:00:00.000Z',
storageClass: 'a',
};
const error = lifecycleConfiguration._checkTimeGap(params);
assert.strictEqual(error, undefined);
});
it('should return error when transitions have dates less than 24 ' +
'hours apart', () => {
const params = {
rule: {
Prefix: 'prefix',
Transition: [{
Date: ['2019-01-01T00:00:00.000Z'],
StorageClass: ['a'],
}, {
Date: ['2019-01-01T23:59:59.999Z'],
StorageClass: ['b'],
}],
},
date: '2019-01-01T00:00:00.000Z',
storageClass: 'a',
};
const error = lifecycleConfiguration._checkTimeGap(params);
assert(error.InvalidArgument);
});
});
});
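// Hedged sketch (not Arsenal's implementation) of the one-day-gap rule the
// removed _checkTimeGap tests above encode: two transitions in a rule must
// be at least 24 hours apart, whether expressed as Days or as ISO dates.
const MS_IN_DAY = 24 * 60 * 60 * 1000;
function transitionsFarEnoughApart(t1, t2) {
    const toMs = t => (t.Days !== undefined ?
        Number(t.Days[0]) * MS_IN_DAY : Date.parse(t.Date[0]));
    return Math.abs(toMs(t1) - toMs(t2)) >= MS_IN_DAY;
}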
describe('LifecycleConfiguration::getConfigJson', () => {
const tests = [
[
@ -1207,7 +744,7 @@ describe('LifecycleConfiguration::getConfigJson', () => {
`should return correct configuration: ${msg}`, () => {
assert.deepStrictEqual(
LifecycleConfiguration.getConfigJson(input),
-expected,
+expected
);
}));
});

View File

@ -35,9 +35,6 @@ describe('ObjectMD class setters/getters', () => {
['LastModified', new Date().toJSON()],
['ContentMd5', null, ''],
['ContentMd5', 'content-md5'],
['ContentLanguage', null, ''],
['ContentLanguage', 'content-language', ''],
['CreationTime', new Date().toJSON()],
['AmzVersionId', null, 'null'],
['AmzVersionId', 'version-id'],
['AmzServerVersionId', null, ''],
@ -94,7 +91,6 @@ describe('ObjectMD class setters/getters', () => {
role: '',
storageType: '',
dataStoreVersionId: '',
isNFS: null,
}],
['ReplicationInfo', {
status: 'PENDING',
@ -110,24 +106,8 @@ describe('ObjectMD class setters/getters', () => {
'arn:aws:iam::account-id:role/dest-resource',
storageType: 'aws_s3',
dataStoreVersionId: '',
isNFS: null,
}],
['DataStoreName', null, ''],
['ReplicationIsNFS', null, null],
['ReplicationIsNFS', true],
['AzureInfo', {
containerPublicAccess: 'container',
containerStoredAccessPolicies: [],
containerImmutabilityPolicy: {},
containerLegalHoldStatus: false,
containerDeletionInProgress: false,
blobType: 'BlockBlob',
blobContentMD5: 'ABCDEF==',
blobCopyInfo: {},
blobSequenceNumber: 42,
blobAccessTierChangeTime: 'abcdef',
blobUncommitted: false,
}],
['LegalHold', null, false],
['LegalHold', true],
['RetentionMode', 'GOVERNANCE'],
@ -183,11 +163,6 @@ describe('ObjectMD class setters/getters', () => {
}]);
});
it('ObjectMD::setReplicationStorageType', () => {
md.setReplicationStorageType('a');
assert.strictEqual(md.getReplicationStorageType(), 'a');
});
it('ObjectMD::setReplicationStorageClass', () => {
md.setReplicationStorageClass('a');
assert.strictEqual(md.getReplicationStorageClass(), 'a');
@ -232,65 +207,6 @@ describe('ObjectMD class setters/getters', () => {
md.getReplicationSiteDataStoreVersionId('zenko'), 'a');
});
it('ObjectMd::isMultipartUpload', () => {
md.setContentMd5('68b329da9893e34099c7d8ad5cb9c940');
assert.strictEqual(md.isMultipartUpload(), false);
md.setContentMd5('741e0f4bad5b093044dc54a74d911094-1');
assert.strictEqual(md.isMultipartUpload(), true);
md.setContentMd5('bda0c0bed89c8bdb9e409df7ae7073c5-9876');
assert.strictEqual(md.isMultipartUpload(), true);
});
it('ObjectMD::getUserMetadata', () => {
md.setUserMetadata({
'x-amz-meta-foo': 'bar',
'x-amz-meta-baz': 'qux',
// This one should be filtered out
'x-amz-storage-class': 'STANDARD_IA',
// This one should be changed to 'x-amz-meta-foobar'
'x-ms-meta-foobar': 'bar',
// ACLs are updated
'acl': {
FULL_CONTROL: ['john'],
},
});
assert.deepStrictEqual(JSON.parse(md.getUserMetadata()), {
'x-amz-meta-foo': 'bar',
'x-amz-meta-baz': 'qux',
'x-amz-meta-foobar': 'bar',
});
assert.deepStrictEqual(md.getAcl(), {
FULL_CONTROL: ['john'],
});
});
it('ObjectMD:clearMetadataValues', () => {
md.setUserMetadata({
'x-amz-meta-foo': 'bar',
});
md.clearMetadataValues();
assert.strictEqual(md.getUserMetadata(), undefined);
});
it('ObjectMD::microVersionId unset', () => {
assert.strictEqual(md.getMicroVersionId(), null);
});
it('ObjectMD::microVersionId set', () => {
const generatedIds = new Set();
for (let i = 0; i < 100; ++i) {
md.updateMicroVersionId();
generatedIds.add(md.getMicroVersionId());
}
// all generated IDs should be different
assert.strictEqual(generatedIds.size, 100);
generatedIds.forEach(key => {
// length is always 16 in hex because leading 0s are
// also encoded in the 8-byte random buffer.
assert.strictEqual(key.length, 16);
});
});
it('ObjectMD::set/getRetentionMode', () => {
md.setRetentionMode('COMPLIANCE');
assert.deepStrictEqual(md.getRetentionMode(), 'COMPLIANCE');
@ -408,8 +324,6 @@ describe('getAttributes static method', () => {
'content-length': true,
'content-type': true,
'content-md5': true,
'content-language': true,
'creation-time': true,
'x-amz-version-id': true,
'x-amz-server-version-id': true,
'x-amz-storage-class': true,
@ -420,7 +334,6 @@ describe('getAttributes static method', () => {
'acl': true,
'key': true,
'location': true,
'azureInfo': true,
'isNull': true,
'nullVersionId': true,
'nullUploadId': true,
@ -448,8 +361,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 0,
dataStoreName: 'file',
dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709',
dataStoreVersionId: 'someversion1',
blockId: 'someBlockId1',
},
{
key: '4e67844b674b093a9e109d42172922ea1f32ec12',
@ -457,8 +368,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 1,
dataStoreName: 'file',
dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48',
dataStoreVersionId: 'someversion2',
blockId: 'someBlockId2',
},
];
md.setLocation(locations);
@ -474,8 +383,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 0,
dataStoreName: 'file',
dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709',
dataStoreVersionId: 'someversion',
blockId: 'someBlockId',
},
{
key: 'deebfb287cfcee1d137b0136562d2d776ba491e1',
@ -483,8 +390,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 1,
dataStoreName: 'file',
dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709',
dataStoreVersionId: 'someversion',
blockId: 'someBlockId',
},
{
key: '4e67844b674b093a9e109d42172922ea1f32ec12',
@ -492,8 +397,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 2,
dataStoreName: 'file',
dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48',
dataStoreVersionId: 'someversion2',
blockId: 'someBlockId2',
},
]);
assert.deepStrictEqual(md.getReducedLocations(), [
@ -503,8 +406,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 0,
dataStoreName: 'file',
dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709',
dataStoreVersionId: 'someversion',
blockId: 'someBlockId',
},
{
key: '4e67844b674b093a9e109d42172922ea1f32ec12',
@ -512,8 +413,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 2,
dataStoreName: 'file',
dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48',
dataStoreVersionId: 'someversion2',
blockId: 'someBlockId2',
},
]);
});
@ -527,8 +426,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 0,
dataStoreName: 'file',
dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709',
dataStoreVersionId: 'someversion',
blockId: 'someBlockId',
},
{
key: 'deebfb287cfcee1d137b0136562d2d776ba491e1',
@ -536,8 +433,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 1,
dataStoreName: 'file',
dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48',
dataStoreVersionId: 'someversion2',
blockId: 'someBlockId2',
},
{
key: '4e67844b674b093a9e109d42172922ea1f32ec12',
@ -545,8 +440,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 2,
dataStoreName: 'file',
dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48',
dataStoreVersionId: 'someversion2',
blockId: 'someBlockId2',
},
]);
assert.deepStrictEqual(md.getReducedLocations(), [
@ -556,8 +449,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 0,
dataStoreName: 'file',
dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709',
dataStoreVersionId: 'someversion',
blockId: 'someBlockId',
},
{
key: '4e67844b674b093a9e109d42172922ea1f32ec12',
@ -565,8 +456,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 1,
dataStoreName: 'file',
dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48',
dataStoreVersionId: 'someversion2',
blockId: 'someBlockId2',
},
]);
});
@ -580,8 +469,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 0, start: 0,
dataStoreName: 'file', dataStoreName: 'file',
dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709', dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709',
dataStoreVersionId: 'someversion',
blockId: 'someBlockId',
}, },
{ {
key: 'c1c1e055b19eb5a61adb8a665e626ff589cff234', key: 'c1c1e055b19eb5a61adb8a665e626ff589cff234',
@ -589,8 +476,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 1, start: 1,
dataStoreName: 'file', dataStoreName: 'file',
dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709', dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709',
dataStoreVersionId: 'someversion',
blockId: 'someBlockId',
}, },
{ {
key: 'deebfb287cfcee1d137b0136562d2d776ba491e1', key: 'deebfb287cfcee1d137b0136562d2d776ba491e1',
@ -598,8 +483,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 3, start: 3,
dataStoreName: 'file', dataStoreName: 'file',
dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709', dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709',
dataStoreVersionId: 'someversion',
blockId: 'someBlockId',
}, },
{ {
key: '8e67844b674b093a9e109d42172922ea1f32ec14', key: '8e67844b674b093a9e109d42172922ea1f32ec14',
@ -607,8 +490,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 4, start: 4,
dataStoreName: 'file', dataStoreName: 'file',
dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48', dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48',
dataStoreVersionId: 'someversion2',
blockId: 'someBlockId2',
}, },
{ {
key: 'd1d1e055b19eb5a61adb8a665e626ff589cff233', key: 'd1d1e055b19eb5a61adb8a665e626ff589cff233',
@ -616,8 +497,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 7, start: 7,
dataStoreName: 'file', dataStoreName: 'file',
dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48', dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48',
dataStoreVersionId: 'someversion2',
blockId: 'someBlockId2',
}, },
{ {
key: '0e67844b674b093a9e109d42172922ea1f32ec11', key: '0e67844b674b093a9e109d42172922ea1f32ec11',
@ -625,8 +504,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 17, start: 17,
dataStoreName: 'file', dataStoreName: 'file',
dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48', dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48',
dataStoreVersionId: 'someversion2',
blockId: 'someBlockId2',
}, },
{ {
key: '8e67844b674b093a9e109d42172922ea1f32ec14', key: '8e67844b674b093a9e109d42172922ea1f32ec14',
@ -634,8 +511,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 27, start: 27,
dataStoreName: 'file', dataStoreName: 'file',
dataStoreETag: '3:1ca655158ca025aa00a818b6b81f9e4c', dataStoreETag: '3:1ca655158ca025aa00a818b6b81f9e4c',
dataStoreVersionId: 'someversion3',
blockId: 'someBlockId3',
}, },
{ {
key: '7e67844b674b093a9e109d42172922ea1f32ec1f', key: '7e67844b674b093a9e109d42172922ea1f32ec1f',
@ -643,8 +518,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 42, start: 42,
dataStoreName: 'file', dataStoreName: 'file',
dataStoreETag: '3:1ca655158ca025aa00a818b6b81f9e4c', dataStoreETag: '3:1ca655158ca025aa00a818b6b81f9e4c',
dataStoreVersionId: 'someversion3',
blockId: 'someBlockId3',
}, },
{ {
key: '1237844b674b093a9e109d42172922ea1f32ec19', key: '1237844b674b093a9e109d42172922ea1f32ec19',
@ -652,8 +525,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 44, start: 44,
dataStoreName: 'file', dataStoreName: 'file',
dataStoreETag: '4:afa655158ca025aa00a818b6b81f9e4d', dataStoreETag: '4:afa655158ca025aa00a818b6b81f9e4d',
dataStoreVersionId: 'someversion4',
blockId: 'someBlockId4',
}, },
{ {
key: '4567844b674b093a9e109d42172922ea1f32ec00', key: '4567844b674b093a9e109d42172922ea1f32ec00',
@ -661,8 +532,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 50, start: 50,
dataStoreName: 'file', dataStoreName: 'file',
dataStoreETag: '4:afa655158ca025aa00a818b6b81f9e4d', dataStoreETag: '4:afa655158ca025aa00a818b6b81f9e4d',
dataStoreVersionId: 'someversion4',
blockId: 'someBlockId4',
}, },
{ {
key: '53d7844b674b093a9e109d42172922ea1f32ec02', key: '53d7844b674b093a9e109d42172922ea1f32ec02',
@ -670,8 +539,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 54, start: 54,
dataStoreName: 'file', dataStoreName: 'file',
dataStoreETag: '4:afa655158ca025aa00a818b6b81f9e4d', dataStoreETag: '4:afa655158ca025aa00a818b6b81f9e4d',
dataStoreVersionId: 'someversion4',
blockId: 'someBlockId4',
}, },
{ {
key: '6f6d7844b674b093a9e109d42172922ea1f32ec01', key: '6f6d7844b674b093a9e109d42172922ea1f32ec01',
@ -679,8 +546,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 63, start: 63,
dataStoreName: 'file', dataStoreName: 'file',
dataStoreETag: '4:afa655158ca025aa00a818b6b81f9e4d', dataStoreETag: '4:afa655158ca025aa00a818b6b81f9e4d',
dataStoreVersionId: 'someversion4',
blockId: 'someBlockId4',
}, },
]); ]);
assert.deepStrictEqual(md.getReducedLocations(), [ assert.deepStrictEqual(md.getReducedLocations(), [
@ -690,8 +555,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 0, start: 0,
dataStoreName: 'file', dataStoreName: 'file',
dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709', dataStoreETag: '1:0e5a6f42662652d44fcf978399ef5709',
dataStoreVersionId: 'someversion',
blockId: 'someBlockId',
}, },
{ {
key: '0e67844b674b093a9e109d42172922ea1f32ec11', key: '0e67844b674b093a9e109d42172922ea1f32ec11',
@ -699,8 +562,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 4, start: 4,
dataStoreName: 'file', dataStoreName: 'file',
dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48', dataStoreETag: '2:9ca655158ca025aa00a818b6b81f9e48',
dataStoreVersionId: 'someversion2',
blockId: 'someBlockId2',
}, },
{ {
key: '7e67844b674b093a9e109d42172922ea1f32ec1f', key: '7e67844b674b093a9e109d42172922ea1f32ec1f',
@ -708,8 +569,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 27, start: 27,
dataStoreName: 'file', dataStoreName: 'file',
dataStoreETag: '3:1ca655158ca025aa00a818b6b81f9e4c', dataStoreETag: '3:1ca655158ca025aa00a818b6b81f9e4c',
dataStoreVersionId: 'someversion3',
blockId: 'someBlockId3',
}, },
{ {
key: '6f6d7844b674b093a9e109d42172922ea1f32ec01', key: '6f6d7844b674b093a9e109d42172922ea1f32ec01',
@ -717,8 +576,6 @@ describe('ObjectMD::getReducedLocations', () => {
start: 44, start: 44,
dataStoreName: 'file', dataStoreName: 'file',
dataStoreETag: '4:afa655158ca025aa00a818b6b81f9e4d', dataStoreETag: '4:afa655158ca025aa00a818b6b81f9e4d',
dataStoreVersionId: 'someversion4',
blockId: 'someBlockId4',
}, },
]); ]);
}); });
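
Taken together, the fixtures above pin down the reduction rule: contiguous location entries that share a `dataStoreETag` collapse into a single entry keeping the first entry's `start` and the last entry's `key`. A rough sketch of that merge over plain location objects; `reduceLocations` is an illustrative name, not Arsenal's internal helper, and the size accumulation is an assumption:

```javascript
// Illustrative stand-in for the merge asserted above: collapse runs of
// entries sharing a dataStoreETag, keeping the first start and the last
// key (sizes are assumed to accumulate across the merged run).
function reduceLocations(locations) {
    const reduced = [];
    for (const loc of locations) {
        const prev = reduced[reduced.length - 1];
        if (prev && prev.dataStoreETag === loc.dataStoreETag) {
            prev.key = loc.key;
            prev.size += loc.size;
        } else {
            reduced.push(Object.assign({}, loc));
        }
    }
    return reduced;
}
```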

View File

@@ -1,102 +0,0 @@
const assert = require('assert');
const ObjectMDAzureInfo = require('../../../index').models.ObjectMDAzureInfo;
const testAzureInfoObj = {
containerPublicAccess: 'container',
containerStoredAccessPolicies: [],
containerImmutabilityPolicy: {},
containerLegalHoldStatus: false,
containerDeletionInProgress: false,
blobType: 'BlockBlob',
blobContentMD5: 'ABCDEF==',
blobIssuedETag: '0xabcdef',
blobCopyInfo: {},
blobSequenceNumber: 42,
blobAccessTierChangeTime: 'abcdef',
blobUncommitted: false,
};
const azureInfo = new ObjectMDAzureInfo(testAzureInfoObj);
describe('ObjectMDAzureInfo value', () => {
it('should return the correct value', () => {
const azureInfoObj = azureInfo.getValue();
assert.deepStrictEqual(azureInfoObj, testAzureInfoObj);
});
});
describe('ObjectMDAzureInfo setters/getters', () => {
it('should control the containerPublicAccess attribute', () => {
const containerPublicAccess = 'new public access value';
azureInfo.setContainerPublicAccess(containerPublicAccess);
assert.deepStrictEqual(azureInfo.getContainerPublicAccess(),
containerPublicAccess);
});
it('should control the containerStoredAccessPolicies attribute', () => {
const containerStoredAccessPolicies = [{}];
azureInfo.setContainerStoredAccessPolicies(
containerStoredAccessPolicies);
assert.deepStrictEqual(azureInfo.getContainerStoredAccessPolicies(),
containerStoredAccessPolicies);
});
it('should control the containerImmutabilityPolicy attribute', () => {
const containerImmutabilityPolicy = { foo: 1 };
azureInfo.setContainerImmutabilityPolicy(containerImmutabilityPolicy);
assert.deepStrictEqual(azureInfo.getContainerImmutabilityPolicy(),
containerImmutabilityPolicy);
});
it('should control the containerLegalHoldStatus attribute', () => {
const containerLegalHoldStatus = true;
azureInfo.setContainerLegalHoldStatus(containerLegalHoldStatus);
assert.deepStrictEqual(azureInfo.getContainerLegalHoldStatus(),
containerLegalHoldStatus);
});
it('should control the containerDeletionInProgress attribute', () => {
const containerDeletionInProgress = true;
azureInfo.setContainerDeletionInProgress(containerDeletionInProgress);
assert.deepStrictEqual(azureInfo.getContainerDeletionInProgress(),
containerDeletionInProgress);
});
it('should control the blobType attribute', () => {
const blobType = 'PlopBlob';
azureInfo.setBlobType(blobType);
assert.deepStrictEqual(azureInfo.getBlobType(),
blobType);
});
it('should control the blobContentMD5 attribute', () => {
const blobContentMD5 = 'ABC';
azureInfo.setBlobContentMD5(blobContentMD5);
assert.deepStrictEqual(azureInfo.getBlobContentMD5(),
blobContentMD5);
});
it('should control the blobIssuedETag attribute', () => {
const blobIssuedETag = '0x123456';
azureInfo.setBlobIssuedETag(blobIssuedETag);
assert.deepStrictEqual(azureInfo.getBlobIssuedETag(),
blobIssuedETag);
});
it('should control the blobCopyInfo attribute', () => {
const blobCopyInfo = { meh: 46 };
azureInfo.setBlobCopyInfo(blobCopyInfo);
assert.deepStrictEqual(azureInfo.getBlobCopyInfo(),
blobCopyInfo);
});
it('should control the blobSequenceNumber attribute', () => {
const blobSequenceNumber = 8888;
azureInfo.setBlobSequenceNumber(blobSequenceNumber);
assert.deepStrictEqual(azureInfo.getBlobSequenceNumber(),
blobSequenceNumber);
});
it('should control the blobAccessTierChangeTime attribute', () => {
const blobAccessTierChangeTime = 'MMXIX';
azureInfo.setBlobAccessTierChangeTime(blobAccessTierChangeTime);
assert.deepStrictEqual(azureInfo.getBlobAccessTierChangeTime(),
blobAccessTierChangeTime);
});
it('should control the blobUncommitted attribute', () => {
const blobUncommitted = true;
azureInfo.setBlobUncommitted(blobUncommitted);
assert.deepStrictEqual(azureInfo.getBlobUncommitted(),
blobUncommitted);
});
});

View File

@@ -9,8 +9,6 @@ describe('ObjectMDLocation', () => {
size: 100,
dataStoreName: 'awsbackend',
dataStoreETag: '2:abcdefghi',
- dataStoreVersionId: 'someversion',
- blockId: 'someBlockId',
cryptoScheme: 1,
cipheredDataKey: 'CiPhErEdDaTaKeY',
};
@@ -18,12 +16,10 @@ describe('ObjectMDLocation', () => {
assert.strictEqual(location.getKey(), 'fookey');
assert.strictEqual(location.getDataStoreName(), 'awsbackend');
assert.strictEqual(location.getDataStoreETag(), '2:abcdefghi');
- assert.strictEqual(location.getDataStoreVersionId(), 'someversion');
assert.strictEqual(location.getPartNumber(), 2);
assert.strictEqual(location.getPartETag(), 'abcdefghi');
assert.strictEqual(location.getPartStart(), 42);
assert.strictEqual(location.getPartSize(), 100);
- assert.strictEqual(location.getBlockId(), 'someBlockId');
assert.strictEqual(location.getCryptoScheme(), 1);
assert.strictEqual(location.getCipheredDataKey(), 'CiPhErEdDaTaKeY');
@@ -40,8 +36,6 @@ describe('ObjectMDLocation', () => {
size: 100,
dataStoreName: 'awsbackend',
dataStoreETag: '2:abcdefghi',
- dataStoreVersionId: 'someversion',
- blockId: 'someBlockId',
cryptoScheme: 1,
cipheredDataKey: 'CiPhErEdDaTaKeY',
});
@@ -49,7 +43,6 @@ describe('ObjectMDLocation', () => {
dataStoreName: 'gcpbackend' });
assert.strictEqual(location.getKey(), 'secondkey');
assert.strictEqual(location.getDataStoreName(), 'gcpbackend');
- assert.strictEqual(location.getDataStoreVersionId(), undefined);
assert.strictEqual(location.getCryptoScheme(), undefined);
assert.strictEqual(location.getCipheredDataKey(), undefined);
assert.deepStrictEqual(location.getValue(), {
@@ -58,19 +51,14 @@ describe('ObjectMDLocation', () => {
key: 'secondkey',
size: 100,
start: 42,
- blockId: 'someBlockId',
});
location.setDataLocation({ key: 'thirdkey',
dataStoreName: 'azurebackend',
- dataStoreVersionId: 'newversion',
cryptoScheme: 1,
cipheredDataKey: 'NeWcIpHeReDdAtAkEy' });
assert.strictEqual(location.getKey(), 'thirdkey');
assert.strictEqual(location.getDataStoreName(), 'azurebackend');
- assert.strictEqual(location.getDataStoreVersionId(), 'newversion');
assert.strictEqual(location.getCryptoScheme(), 1);
assert.strictEqual(location.getCipheredDataKey(), 'NeWcIpHeReDdAtAkEy');
- location.setBlockId('otherBlockId');
- assert.strictEqual(location.getBlockId(), 'otherBlockId');
});
});
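
The removed assertions also document a side effect worth keeping in mind: `setDataLocation` replaces the location descriptor wholesale, so any field the new descriptor omits (the crypto fields here) reads back as `undefined`. A short usage sketch, assuming `ObjectMDLocation` is exported from the package index alongside the other models:

```javascript
const ObjectMDLocation = require('arsenal').models.ObjectMDLocation;

const location = new ObjectMDLocation({
    key: 'fookey',
    start: 42,
    size: 100,
    dataStoreName: 'awsbackend',
    dataStoreETag: '2:abcdefghi',
    cryptoScheme: 1,
    cipheredDataKey: 'CiPhErEdDaTaKeY',
});

// Swapping the data location drops fields the new descriptor omits.
location.setDataLocation({ key: 'secondkey', dataStoreName: 'gcpbackend' });
// location.getCryptoScheme() === undefined
// location.getCipheredDataKey() === undefined
```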

View File

@@ -1,74 +0,0 @@
const assert = require('assert');
const { parseString } = require('xml2js');
const werelogs = require('werelogs');
const ReplicationConfiguration =
require('../../../lib/models/ReplicationConfiguration');
const logger = new werelogs.Logger('test:ReplicationConfiguration');
const mockedConfig = {
replicationEndpoints: [{
type: 'scality',
site: 'ring',
default: true,
}, {
type: 'aws_s3',
site: 'awsbackend',
}, {
type: 'gcp',
site: 'gcpbackend',
}, {
type: 'azure',
site: 'azurebackend',
}],
};
function getXMLConfig(hasPreferredRead) {
return `
<ReplicationConfiguration>
<Role>arn:aws:iam::root:role/s3-replication-role</Role>
<Rule>
<ID>Replication-Rule-1</ID>
<Status>Enabled</Status>
<Prefix>someprefix/</Prefix>
<Destination>
<Bucket>arn:aws:s3:::destbucket</Bucket>
<StorageClass>awsbackend,` +
`gcpbackend${hasPreferredRead ? ':preferred_read' : ''},azurebackend` +
`</StorageClass>
</Destination>
</Rule>
</ReplicationConfiguration>
`;
}
describe('ReplicationConfiguration class', () => {
it('should parse replication config XML without preferred read', done => {
const repConfigXML = getXMLConfig(false);
parseString(repConfigXML, (err, parsedXml) => {
assert.ifError(err);
const repConf = new ReplicationConfiguration(
parsedXml, logger, mockedConfig);
const repConfErr = repConf.parseConfiguration();
assert.ifError(repConfErr);
assert.strictEqual(repConf.getPreferredReadLocation(), null);
done();
});
});
it('should parse replication config XML with preferred read', done => {
const repConfigXML = getXMLConfig(true);
parseString(repConfigXML, (err, parsedXml) => {
assert.ifError(err);
const repConf = new ReplicationConfiguration(
parsedXml, logger, mockedConfig);
const repConfErr = repConf.parseConfiguration();
assert.ifError(repConfErr);
assert.strictEqual(repConf.getPreferredReadLocation(),
'gcpbackend');
done();
});
});
});
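
The `<StorageClass>` element carries a comma-separated list of sites, one of which may be tagged with a `:preferred_read` suffix. A minimal sketch of that suffix handling, separate from the real `ReplicationConfiguration.parseConfiguration` plumbing:

```javascript
// Minimal sketch: split a StorageClass value such as
// 'awsbackend,gcpbackend:preferred_read,azurebackend' into the site
// list and the optional preferred read location.
function parseStorageClass(storageClass) {
    let preferredRead = null;
    const sites = storageClass.split(',').map(entry => {
        const [site, marker] = entry.split(':');
        if (marker === 'preferred_read') {
            preferredRead = site;
        }
        return site;
    });
    return { sites, preferredRead };
}

// parseStorageClass('awsbackend,gcpbackend:preferred_read,azurebackend')
// => { sites: ['awsbackend', 'gcpbackend', 'azurebackend'],
//      preferredRead: 'gcpbackend' }
```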

View File

@@ -1,212 +0,0 @@
const assert = require('assert');
const HealthProbeServer =
require('../../../../lib/network/probe/HealthProbeServer');
const http = require('http');
function makeRequest(meth, uri) {
const params = {
hostname: 'localhost',
port: 4042,
method: meth,
path: uri,
};
const req = http.request(params);
req.setNoDelay(true);
return req;
}
const healthcheckEndpoints = [
'/_/health/liveness',
'/_/health/readiness',
];
const badHealthcheckEndpoints = [
'/_/health/liveness_thisiswrong',
'/_/health/readiness_thisiswrong',
];
describe('network.probe.HealthProbeServer', () => {
describe('service is "up"', () => {
let server;
function setup(done) {
server = new HealthProbeServer({ port: 4042 });
server._cbOnListening = done;
server.start();
}
beforeAll(done => {
setup(done);
});
afterAll(done => {
server.stop();
done();
});
healthcheckEndpoints.forEach(ep => {
it('should perform a GET and ' +
'return 200 OK', done => {
makeRequest('GET', ep)
.on('response', res => {
assert(res.statusCode === 200);
done();
})
.on('error', err => {
assert.ifError(err);
done();
}).end();
});
});
});
describe('service is "down"', () => {
let server;
function setup(done) {
function falseStub() {
return false;
}
server = new HealthProbeServer({
port: 4042,
livenessCheck: falseStub,
readinessCheck: falseStub,
});
server.start();
done();
}
beforeAll(done => {
setup(done);
});
afterAll(done => {
server.stop();
done();
});
healthcheckEndpoints.forEach(ep => {
it('should perform a GET and ' +
'return 503 ServiceUnavailable', done => {
makeRequest('GET', ep)
.on('response', res => {
assert(res.statusCode === 503);
done();
})
.on('error', err => {
assert.ifError(err);
done();
}).end();
});
});
});
describe('Invalid Methods', () => {
jest.setTimeout(10000);
let server;
function setup(done) {
server = new HealthProbeServer({
port: 4042,
});
server.start();
done();
}
beforeAll(done => {
setup(done);
});
afterAll(done => {
server.stop();
done();
});
healthcheckEndpoints.forEach(ep => {
it('should perform a POST and ' +
'return 405 MethodNotAllowed', done => {
makeRequest('POST', ep)
.on('response', res => {
assert(res.statusCode === 405);
done();
})
.on('error', err => {
assert.ifError(err);
done();
}).end();
});
});
});
describe('Invalid URI', () => {
let server;
function setup(done) {
server = new HealthProbeServer({
port: 4042,
});
server.start();
done();
}
beforeAll(done => {
setup(done);
});
afterAll(done => {
server.stop();
done();
});
badHealthcheckEndpoints.forEach(ep => {
it('should perform a GET and ' +
'return 400 InvalidURI', done => {
makeRequest('GET', ep)
.on('response', res => {
assert(res.statusCode === 400);
done();
})
.on('error', err => {
assert.ifError(err);
done();
}).end();
});
});
});
describe('metrics route', () => {
let server;
function setup(done) {
server = new HealthProbeServer({ port: 4042 });
server._cbOnListening = done;
server.start();
}
beforeAll(done => {
setup(done);
});
afterAll(done => {
server.stop();
done();
});
it('should expose metrics', done => {
makeRequest('GET', '/_/monitoring/metrics')
.on('response', res => {
assert(res.statusCode === 200);
const respBufs = [];
res.on('data', data => {
respBufs.push(data);
});
res.on('end', () => {
const respContents = respBufs.join('');
assert(respContents.length > 0);
done();
});
res.on('error', err => {
assert.ifError(err);
done();
});
})
.on('error', err => {
assert.ifError(err);
done();
}).end();
});
});
});

View File

@@ -52,7 +52,7 @@ describe('network.probe.ProbeServer', () => {
});
});
- it('allows probe to handle requests', done => {
+ it('does nothing if probe successful', done => {
server.addHandler('/check', res => {
res.writeHead(200);
res.end();
@@ -87,4 +87,20 @@ describe('network.probe.ProbeServer', () => {
}
});
});
+ it('500 response on bad probe', done => {
+ server.addHandler('/check', () => 'check failed');
+ makeRequest('GET', '/check', (err, res) => {
+ assert.ifError(err);
+ assert.strictEqual(res.statusCode, 500);
+ res.setEncoding('utf8');
+ res.on('data', body => {
+ assert.strictEqual(
+ body,
+ '{"errorType":"InternalError","errorMessage":"check failed"}',
+ );
+ done();
+ });
+ });
+ });
});

View File

@@ -1,74 +0,0 @@
const assert = require('assert');
const errors = require('../../../../lib/errors');
const { sendError, sendSuccess } = require('../../../../lib/network/probe/Utils');
const sinon = require('sinon');
describe('network.probe.Utils', () => {
let mockLogger;
beforeEach(() => {
mockLogger = {
debug: sinon.fake(),
};
});
afterEach(() => {
sinon.restore();
});
it('send success will return 200 OK', done => {
const mockRes = {
writeHead: sinon.fake(status => assert.strictEqual(200, status)),
end: sinon.fake(msg => {
assert.strictEqual(msg, 'OK');
done();
}),
};
sendSuccess(mockRes, mockLogger);
});
it('send success will return 200 and optional message', done => {
const mockRes = {
writeHead: sinon.fake(status => assert.strictEqual(200, status)),
end: sinon.fake(msg => {
assert.strictEqual(msg, 'Granted');
done();
}),
};
sendSuccess(mockRes, mockLogger, 'Granted');
});
it('send error will send an Arsenal Error and code', done => {
const mockRes = {
writeHead: sinon.fake(status => assert.strictEqual(405, status)),
end: sinon.fake(msg => {
assert.deepStrictEqual(
JSON.parse(msg),
{
errorType: 'MethodNotAllowed',
errorMessage: errors.MethodNotAllowed.description,
},
);
done();
}),
};
sendError(mockRes, mockLogger, errors.MethodNotAllowed);
});
it('send error will send an Arsenal Error and code using optional message', done => {
const mockRes = {
writeHead: sinon.fake(status => assert.strictEqual(405, status)),
end: sinon.fake(msg => {
assert.deepStrictEqual(
JSON.parse(msg),
{
errorType: 'MethodNotAllowed',
errorMessage: 'Very much not allowed',
},
);
done();
}),
};
sendError(mockRes, mockLogger, errors.MethodNotAllowed, 'Very much not allowed');
});
});

View File

@@ -1,31 +0,0 @@
'use strict'; // eslint-disable-line
const assert = require('assert');
const constants = require('../../../../lib/constants');
const { parseURL } = require('../../../../lib/network/rest/utils');
describe('parseURL function', () => {
[
{
inputUrl: `${constants.passthroughFileURL}/test`,
expectedKey: 'test',
},
{
inputUrl: `${constants.passthroughFileURL}/test with spaces`,
expectedKey: 'test with spaces',
},
{
inputUrl: `${constants.passthroughFileURL}` +
'/test%20with%20encoded%20spaces',
expectedKey: 'test with encoded spaces',
},
].forEach(testCase => {
const { inputUrl, expectedKey } = testCase;
it(`should return ${expectedKey} with url "${inputUrl}"`,
() => {
const pathInfo = parseURL(inputUrl, true);
assert.strictEqual(pathInfo.key, expectedKey);
});
});
});

View File

@@ -1,9 +0,0 @@
{
"privateKey": "-----BEGIN RSA PRIVATE KEY-----\r\nMIIEowIBAAKCAQEAj13sSYE40lAX2qpBvfdGfcSVNtBf8i5FH+E8FAhORwwPu+2S\r\n3yBQbgwHq30WWxunGb1NmZL1wkVZ+vf12DtxqFRnMA08LfO4oO6oC4V8XfKeuHyJ\r\n1qlaKRINz6r9yDkTHtwWoBnlAINurlcNKgGD5p7D+G26Chbr/Oo0ZwHula9DxXy6\r\neH8/bJ5/BynyNyyWRPoAO+UkUdY5utkFCUq2dbBIhovMgjjikf5p2oWqnRKXc+JK\r\nBegr6lSHkkhyqNhTmd8+wA+8Cace4sy1ajY1t5V4wfRZea5vwl/HlyyKodvHdxng\r\nJgg6H61JMYPkplY6Gr9OryBKEAgq02zYoYTDfwIDAQABAoIBAAuDYGlavkRteCzw\r\nRU1LIVcSRWVcgIgDXTu9K8T0Ec0008Kkxomyn6LmxmroJbZ1VwsDH8s4eRH73ckA\r\nxrZxt6Pr+0lplq6eBvKtl8MtGhq1VDe+kJczjHEF6SQHOFAu/TEaPZrn2XMcGvRX\r\nO1BnRL9tepFlxm3u/06VRFYNWqqchM+tFyzLu2AuiuKd5+slSX7KZvVgdkY1ErKH\r\ngB75lPyhPb77C/6ptqUisVMSO4JhLhsD0+ekDVY982Sb7KkI+szdWSbtMx9Ek2Wo\r\ntXwJz7I8T7IbODy9aW9G+ydyhMDFmaEYIaDVFKJj5+fluNza3oQ5PtFNVE50GQJA\r\nsisGqfECgYEAwpkwt0KpSamSEH6qknNYPOwxgEuXWoFVzibko7is2tFPvY+YJowb\r\n68MqHIYhf7gHLq2dc5Jg1TTbGqLECjVxp4xLU4c95KBy1J9CPAcuH4xQLDXmeLzP\r\nJ2YgznRocbzAMCDAwafCr3uY9FM7oGDHAi5bE5W11xWx+9MlFExL3JkCgYEAvJp5\r\nf+JGN1W037bQe2QLYUWGszewZsvplnNOeytGQa57w4YdF42lPhMz6Kc/zdzKZpN9\r\njrshiIDhAD5NCno6dwqafBAW9WZl0sn7EnlLhD4Lwm8E9bRHnC9H82yFuqmNrzww\r\nzxBCQogJISwHiVz4EkU48B283ecBn0wT/fAa19cCgYEApKWsnEHgrhy1IxOpCoRh\r\nUhqdv2k1xDPN/8DUjtnAFtwmVcLa/zJopU/Zn4y1ZzSzjwECSTi+iWZRQ/YXXHPf\r\nl92SFjhFW92Niuy8w8FnevXjF6T7PYiy1SkJ9OR1QlZrXc04iiGBDazLu115A7ce\r\nanACS03OLw+CKgl6Q/RR83ECgYBCUngDVoimkMcIHHt3yJiP3ikeAKlRnMdJlsa0\r\nXWVZV4hCG3lDfRXsnEgWuimftNKf+6GdfYSvQdLdiQsCcjT5A4uLsQTByv5nf4uA\r\n1ZKOsFrmRrARzxGXhLDikvj7yP//7USkq+0BBGFhfuAvl7fMhPceyPZPehqB7/jf\r\nxX1LBQKBgAn5GgSXzzS0e06ZlP/VrKxreOHa5Z8wOmqqYQ0QTeczAbNNmuITdwwB\r\nNkbRqpVXRIfuj0BQBegAiix8om1W4it0cwz54IXBwQULxJR1StWxj3jo4QtpMQ+z\r\npVPdB1Ilb9zPV1YvDwRfdS1xsobzznAx56ecsXduZjs9mF61db8Q\r\n-----END RSA PRIVATE KEY-----\r\n",
"publicKey": "-----BEGIN PUBLIC KEY-----\r\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAj13sSYE40lAX2qpBvfdG\r\nfcSVNtBf8i5FH+E8FAhORwwPu+2S3yBQbgwHq30WWxunGb1NmZL1wkVZ+vf12Dtx\r\nqFRnMA08LfO4oO6oC4V8XfKeuHyJ1qlaKRINz6r9yDkTHtwWoBnlAINurlcNKgGD\r\n5p7D+G26Chbr/Oo0ZwHula9DxXy6eH8/bJ5/BynyNyyWRPoAO+UkUdY5utkFCUq2\r\ndbBIhovMgjjikf5p2oWqnRKXc+JKBegr6lSHkkhyqNhTmd8+wA+8Cace4sy1ajY1\r\nt5V4wfRZea5vwl/HlyyKodvHdxngJgg6H61JMYPkplY6Gr9OryBKEAgq02zYoYTD\r\nfwIDAQAB\r\n-----END PUBLIC KEY-----\r\n",
"accessKey": "QXP3VDG3SALNBX2QBJ1C",
"secretKey": "K5FyqZo5uFKfw9QBtn95o6vuPuD0zH/1seIrqPKqGnz8AxALNSx6EeRq7G1I6JJpS1XN13EhnwGn2ipsml3Uf2fQ00YgEmImG8wzGVZm8fWotpVO4ilN4JGyQCah81rNX4wZ9xHqDD7qYR5MyIERxR/osoXfctOwY7GGUjRKJfLOguNUlpaovejg6mZfTvYAiDF+PTO1sKUYqHt1IfKQtsK3dov1EFMBB5pWM7sVfncq/CthKN5M+VHx9Y87qdoP3+7AW+RCBbSDOfQgxvqtS7PIAf10mDl8k2kEURLz+RqChu4O4S0UzbEmtja7wa7WYhYKv/tM/QeW7kyNJMmnPg==",
"decryptedSecretKey": "n7PSZ3U6SgerF9PCNhXYsq3S3fRKVGdZTicGV8Ur",
"canonicalId": "79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be",
"userName": "arsenal-0"
}

View File

@@ -1,353 +0,0 @@
const assert = require('assert');
const { patchLocations } = require('../../../lib/patches/locationConstraints');
const {
privateKey, accessKey, decryptedSecretKey, secretKey,
} = require('./creds.json');
const tests = [
{
locationType: 'location-mem-v1',
locations: {
name: 'legacy',
objectId: 'legacy',
},
expected: {
details: {
supportsVersioning: true,
},
isTransient: false,
legacyAwsBehavior: false,
name: 'mem-loc',
objectId: 'legacy',
sizeLimitGB: null,
type: 'mem',
},
},
{
locationType: 'location-file-v1',
locations: {
objectId: 'us-east-1',
legacyAwsBehavior: true,
},
expected: {
details: {
supportsVersioning: true,
},
isTransient: false,
legacyAwsBehavior: true,
objectId: 'us-east-1',
sizeLimitGB: null,
type: 'file',
},
},
{
locationType: 'location-azure-v1',
locations: {
objectId: 'azurebackendtest',
details: {
bucketMatch: 'azurebucketmatch',
endpoint: 'azure.end.point',
bucketName: 'azurebucketname',
accessKey,
secretKey,
},
},
expected: {
details: {
azureContainerName: 'azurebucketname',
azureStorageAccessKey: decryptedSecretKey,
azureStorageAccountName: accessKey,
azureStorageEndpoint: 'azure.end.point',
bucketMatch: 'azurebucketmatch',
},
isTransient: false,
legacyAwsBehavior: false,
objectId: 'azurebackendtest',
sizeLimitGB: null,
type: 'azure',
},
},
{
locationType: 'location-aws-s3-v1',
locations: {
objectId: 'awsbackendtest',
details: {
bucketMatch: 'awsbucketmatch',
endpoint: 'aws.end.point',
bucketName: 'awsbucketname',
region: 'us-west-1',
accessKey,
secretKey,
},
},
expected: {
details: {
awsEndpoint: 'aws.end.point',
bucketMatch: 'awsbucketmatch',
bucketName: 'awsbucketname',
https: true,
pathStyle: false,
serverSideEncryption: false,
supportsVersioning: true,
region: 'us-west-1',
credentials: {
accessKey,
secretKey: decryptedSecretKey,
},
},
isTransient: false,
legacyAwsBehavior: false,
objectId: 'awsbackendtest',
sizeLimitGB: null,
type: 'aws_s3',
},
},
{
locationType: 'location-gcp-v1',
locations: {
name: 'gcpbackendtest',
objectId: 'gcpbackendtest',
details: {
bucketMatch: 'gcpbucketmatch',
endpoint: 'gcp.end.point',
accessKey: 'gcpaccesskey',
secretKey,
bucketName: 'gcpbucketname',
},
},
expected: {
details: {
bucketMatch: 'gcpbucketmatch',
bucketName: 'gcpbucketname',
credentials: {
accessKey: 'gcpaccesskey',
secretKey: decryptedSecretKey,
},
gcpEndpoint: 'gcp.end.point',
mpuBucketName: undefined,
https: true,
},
legacyAwsBehavior: false,
isTransient: false,
sizeLimitGB: null,
type: 'gcp',
objectId: 'gcpbackendtest',
},
},
{
locationType: 'location-scality-sproxyd-v1',
locations: {
name: 'sproxydbackendtest',
objectId: 'sproxydbackendtest',
details: {
chordCos: 3,
bootstrapList: ['localhost:8001', 'localhost:8002'],
proxyPath: '/proxy/path',
},
},
expected: {
details: {
connector: {
sproxyd: {
chordCos: 3,
bootstrap: [
'localhost:8001',
'localhost:8002',
],
path: '/proxy/path',
},
},
supportsVersioning: true,
},
legacyAwsBehavior: false,
isTransient: false,
sizeLimitGB: null,
type: 'scality',
objectId: 'sproxydbackendtest',
},
},
{
locationType: 'location-scality-ring-s3-v1',
locations: {
objectId: 'httpsawsbackendtest',
details: {
bucketMatch: 'rings3bucketmatch',
endpoint: 'https://secure.ring.end.point',
accessKey: 'rings3accesskey',
secretKey,
bucketName: 'rings3bucketname',
region: 'us-west-1',
},
},
expected: {
details: {
awsEndpoint: 'secure.ring.end.point',
bucketMatch: 'rings3bucketmatch',
bucketName: 'rings3bucketname',
credentials: {
accessKey: 'rings3accesskey',
secretKey: decryptedSecretKey,
},
https: true,
pathStyle: true,
region: 'us-west-1',
serverSideEncryption: false,
supportsVersioning: true,
},
legacyAwsBehavior: false,
isTransient: false,
sizeLimitGB: null,
type: 'aws_s3',
objectId: 'httpsawsbackendtest',
},
},
{
locationType: 'location-ceph-radosgw-s3-v1',
locations: {
objectId: 'cephbackendtest',
details: {
bucketMatch: 'cephbucketmatch',
endpoint: 'https://secure.ceph.end.point',
accessKey: 'cephs3accesskey',
secretKey,
bucketName: 'cephbucketname',
region: 'us-west-1',
},
},
expected: {
details: {
awsEndpoint: 'secure.ceph.end.point',
bucketMatch: 'cephbucketmatch',
bucketName: 'cephbucketname',
credentials: {
accessKey: 'cephs3accesskey',
secretKey: decryptedSecretKey,
},
https: true,
pathStyle: true,
region: 'us-west-1',
serverSideEncryption: false,
supportsVersioning: true,
},
legacyAwsBehavior: false,
isTransient: false,
sizeLimitGB: null,
type: 'aws_s3',
objectId: 'cephbackendtest',
},
},
{
name: 'transient enabled',
locationType: 'location-file-v1',
locations: {
objectId: 'transienttest',
isTransient: true,
},
expected: {
type: 'file',
objectId: 'transienttest',
legacyAwsBehavior: false,
isTransient: true,
sizeLimitGB: null,
details: {
supportsVersioning: true,
},
},
},
{
name: 'limited size',
locationType: 'location-file-v1',
locations: {
objectId: 'sizelimitedtest',
sizeLimitGB: 1024,
},
expected: {
type: 'file',
objectId: 'sizelimitedtest',
legacyAwsBehavior: false,
isTransient: false,
sizeLimitGB: 1024,
details: {
supportsVersioning: true,
},
},
},
{
name: 'zero size limit',
locationType: 'location-file-v1',
locations: {
objectId: 'sizezerotest',
sizeLimitGB: 0,
},
expected: {
type: 'file',
objectId: 'sizezerotest',
legacyAwsBehavior: false,
isTransient: false,
sizeLimitGB: null,
details: {
supportsVersioning: true,
},
},
},
];
describe('patch location constraints', () => {
const mockLog = {
info: () => {},
};
tests.forEach(spec => {
const testName = spec.name || `should patch ${spec.locationType}`;
it(testName, () => {
// copy specs to include extra attributes
const locations = spec.locations;
const expected = spec.expected;
// add a name to the locations and expected objects without having to repeat it in every spec
const locationName = spec.name || `name-${spec.locationType}`;
locations.name = locationName;
expected.name = locationName;
// also add the location type
locations.locationType = spec.locationType;
expected.locationType = spec.locationType;
assert.deepStrictEqual(
patchLocations(
{ [locationName]: locations },
{ privateKey },
mockLog,
),
{ [locationName]: expected },
);
});
});
it('undefined location', () => {
assert.deepStrictEqual(
patchLocations(
undefined,
{ privateKey },
mockLog,
),
{},
);
});
it('bad location type', () => {
assert.deepStrictEqual(
patchLocations(
{
name: {
locationType: 'bad-location',
},
},
{ privateKey },
mockLog,
),
{},
);
});
});

View File

@@ -522,7 +522,7 @@ describe('LifecycleUtils::filterRules', () => {
const expRes1 = getRuleIDs(mBucketRules.filter(rule =>
(rule.Filter && rule.Filter.Tag &&
rule.Filter.Tag.Key === 'tag1' &&
- rule.Filter.Tag.Value === 'val1'),
+ rule.Filter.Tag.Value === 'val1')
));
assert.deepStrictEqual(expRes1, getRuleIDs(res1));
@@ -532,7 +532,7 @@ describe('LifecycleUtils::filterRules', () => {
const expRes2 = getRuleIDs(mBucketRules.filter(rule =>
rule.Filter && rule.Filter.Tag &&
rule.Filter.Tag.Key === 'tag3-1' &&
- rule.Filter.Tag.Value === 'val3',
+ rule.Filter.Tag.Value === 'val3'
));
assert.deepStrictEqual(expRes2, getRuleIDs(res2));
});

View File

@@ -1,56 +0,0 @@
const assert = require('assert');
const crypto = require('crypto');
const { createAggregateETag } =
require('../../../lib/s3middleware/processMpuParts');
describe('createAggregateETag', () => {
[{
partETags: ['3858f62230ac3c915f300c664312c63f'],
aggregateETag: 'c4529dc85643bb0c5a96e46587377777-1',
}, {
partETags: ['ffc88b4ca90a355f8ddba6b2c3b2af5c',
'd067a0fa9dc61a6e7195ca99696b5a89'],
aggregateETag: '620e8b191a353bdc9189840bb3904928-2',
}, {
partETags: ['ffc88b4ca90a355f8ddba6b2c3b2af5c',
'd067a0fa9dc61a6e7195ca99696b5a89',
'49dcd91231f801159e893fb5c6674985',
'1292a1f4afecfeb84e1b200389d1c904',
'6b70b0751c98492074a7359f0f70d76d',
'5c55c71b3b582f6b700f83bb834f2430',
'84562b55618378a7ac5cfcbc7f3b2ceb',
'b5693c44bad7a2cf51c82c6a2fe1a4b6',
'628b37ac2dee9c123cd2e3e2e486eb27',
'4cacc7e3b7933e54422243964db169af',
'0add1fb9122cc9df84aee7c4bb86d658',
'5887704d69ee209f32c9314c345c8084',
'374e87eeee83bed471b78eefc8d7e28e',
'4e2af9f5fa8b64b19f78ddfbcfcab148',
'8e06231275f3afe7953fc7d57b65723f',
'c972158cb957cf48e18b475b908d5d82',
'311c2324dd756c9655129de049f69c9b',
'0188a9df3e1c4ce18f81e4ba24c672a0',
'1a15c4da6038a6626ad16473712eb358',
'd13c52938d8e0f01192d16b0de17ea4c'],
aggregateETag: 'd3d5a0ab698dd360e755a467f7899e7e-20',
}].forEach(test => {
it(`should compute aggregate ETag with ${test.partETags.length} parts`,
() => {
const aggregateETag = createAggregateETag(test.partETags);
assert.strictEqual(aggregateETag, test.aggregateETag);
});
});
it('should compute aggregate ETag with 10000 parts', () => {
const partETags = [];
for (let i = 0; i < 10000; ++i) {
const md5hash = crypto.createHash('md5');
md5hash.update(`part${i}`, 'binary');
partETags.push(md5hash.digest('hex'));
}
const aggregateETag = createAggregateETag(partETags);
assert.strictEqual(
aggregateETag, 'bff290751e485f06dcc0203c77ed2fd9-10000');
});
});
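
These fixtures follow the standard S3 multipart convention: the aggregate ETag is the MD5 of the concatenated binary part digests, suffixed with `-<part count>`. A sketch under that assumption (the helper under test lives in `lib/s3middleware/processMpuParts`):

```javascript
const crypto = require('crypto');

// Sketch of the multipart ETag convention the fixtures above encode:
// MD5 over the concatenated raw part digests, then '-<part count>'.
function aggregateETag(partETags) {
    const concatenated = Buffer.concat(
        partETags.map(etag => Buffer.from(etag, 'hex')));
    const digest = crypto.createHash('md5')
        .update(concatenated)
        .digest('hex');
    return `${digest}-${partETags.length}`;
}

// If the convention holds, this reproduces the first fixture:
// aggregateETag(['3858f62230ac3c915f300c664312c63f'])
// => 'c4529dc85643bb0c5a96e46587377777-1'
```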

View File

@@ -7,7 +7,6 @@ const {
_checkEtagNoneMatch,
_checkModifiedSince,
_checkUnmodifiedSince,
- checkDateModifiedHeaders,
validateConditionalHeaders,
} = require('../../../lib/s3middleware/validateConditionalHeaders');
@@ -173,59 +172,6 @@ describe('validateConditionalHeaders util function ::', () => {
});
});
- describe('checkDateModifiedHeaders util function: ', () => {
- const expectedSuccess = {
- present: true,
- error: null,
- };
- const expectedAbsense = {
- present: false,
- error: null,
- };
- it('should return NotModified error for \'if-modified-since\' header',
- () => {
- const header = {};
- header['if-modified-since'] = afterLastModified;
- const { modifiedSinceRes, unmodifiedSinceRes } =
- checkDateModifiedHeaders(header, lastModified);
- assert.deepStrictEqual(modifiedSinceRes.error, errors.NotModified);
- assert.deepStrictEqual(unmodifiedSinceRes, expectedAbsense);
- });
- it('should return PreconditionFailed error for \'if-unmodified-since\' ' +
- 'header', () => {
- const header = {};
- header['if-unmodified-since'] = beforeLastModified;
- const { modifiedSinceRes, unmodifiedSinceRes } =
- checkDateModifiedHeaders(header, lastModified);
- assert.deepStrictEqual(unmodifiedSinceRes.error,
- errors.PreconditionFailed);
- assert.deepStrictEqual(modifiedSinceRes, expectedAbsense);
- });
- it('should succeed if \'if-modified-since\' header value is earlier ' +
- 'than last modified', () => {
- const header = {};
- header['if-modified-since'] = beforeLastModified;
- const { modifiedSinceRes, unmodifiedSinceRes } =
- checkDateModifiedHeaders(header, lastModified);
- assert.deepStrictEqual(modifiedSinceRes, expectedSuccess);
- assert.deepStrictEqual(unmodifiedSinceRes, expectedAbsense);
- });
- it('should succeed if \'if-unmodified-since\' header value is later ' +
- 'than last modified', () => {
- const header = {};
- header['if-unmodified-since'] = afterLastModified;
- const { modifiedSinceRes, unmodifiedSinceRes } =
- checkDateModifiedHeaders(header, lastModified);
- assert.deepStrictEqual(unmodifiedSinceRes, expectedSuccess);
- assert.deepStrictEqual(modifiedSinceRes, expectedAbsense);
- });
- });
describe('_checkEtagMatch function :', () => {
const expectedSuccess = {
present: true,

View File

@@ -1,36 +0,0 @@
const assert = require('assert');
const routesUtils = require('../../../../lib/s3routes/routesUtils.js');
const bannedStr = 'banned';
const prefixBlacklist = [];
// byte size of 915
const keyutf8 = '%EA%9D%8B崰㈌㒈保轖䳷䀰⺩ቆ楪秲ⴝ㿅鼎퇬枅࿷염곞召㸾⌙ꪊᆐ庍뉆䌗幐鸆䛃➟녩' +
'ˍ뙪臅⠙≼绒벊냂詴 끴鹲萯⇂㭢䈊퉉楝舳㷖족痴䧫㾵᏷ำꎆ꼵껪멷㄀誕㳓腜쒃컹㑻鳃삚舿췈孨੦⮀NJ곓⵪꺼꜈' +
'嗼뫘悕錸瑺⁤륒㜓垻ㆩꝿ詀펉ᆙ舑䜾힑藪碙ꀎꂰ췊Ᏻ 㘺幽醛잯ද汧Ꟑꛒⶨ쪸숞헹㭔ꡔᘼ뺓ᡆ᡾ᑟ䅅퀭耓弧⢠⇙' +
'폪ް蛧⃪Ἔ돫ꕢ븥ヲ캂䝄쟐颺ᓾ둾Ұ껗礞ᾰ瘹蒯硳풛瞋襎奺熝妒컚쉴⿂㽝㝳駵鈚䄖戭䌸᫲ᇁ䙪鸮ᐴ稫ⶭ뀟ھ⦿' +
'䴳稉ꉕ捈袿놾띐✯伤䃫⸧ꠏ瘌틳藔ˋ㫣敀䔩㭘식↴⧵佶痊牌ꪌ搒꾛æᤈべ쉴挜敗羥誜嘳ֶꫜ걵ࣀ묟ኋ拃秷膤䨸菥' +
'䟆곘縧멀煣卲챸⧃⏶혣ਔ뙞밺㊑ک씌촃Ȅ頰ᖅ懚ホῐ꠷㯢먈㝹୥밷㮇䘖桲阥黾噘烻ᓧ鈠ᴥ徰穆ꘛ蹕綻表鯍裊' +
'鮕漨踒ꠍ픸Ä☶莒浏钸목탬툖氭ˠٸ൪㤌ᶟ訧ᜒೳ揪Ⴛ摖㸣᳑⹞걀ꢢ䏹ῖ"';
describe('routesUtils.isValidObjectKey', () => {
it('should return isValid false if object key name starts with a ' +
'blacklisted prefix', () => {
const result = routesUtils.isValidObjectKey('bannedkey', [bannedStr]);
// return { isValid: false, invalidPrefix };
assert.strictEqual(result.isValid, false);
assert.strictEqual(result.invalidPrefix, bannedStr);
});
it('should return isValid false if object key name exceeds length of 915',
() => {
const key = 'a'.repeat(916);
const result = routesUtils.isValidObjectKey(key, prefixBlacklist);
assert.strictEqual(result.isValid, false);
});
it('should return isValid true for a utf8 string of byte size 915', () => {
const result = routesUtils.isValidObjectKey(keyutf8, prefixBlacklist);
assert.strictEqual(result.isValid, true);
});
});
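
Both rejection paths are cheap string checks: a prefix match against the blacklist and a UTF-8 byte-length cap, which is why the fixture above is measured in bytes (915) rather than characters. A sketch of equivalent validation logic; the 915-byte limit and the return shape are inferred from the tests:

```javascript
// Sketch of the validation exercised above: reject keys starting with a
// blacklisted prefix or whose UTF-8 encoding exceeds 915 bytes.
const KEY_MAX_BYTE_LENGTH = 915; // inferred from the test fixtures

function isValidObjectKey(objectKey, prefixBlacklist) {
    const invalidPrefix = prefixBlacklist.find(
        prefix => objectKey.startsWith(prefix));
    if (invalidPrefix !== undefined) {
        return { isValid: false, invalidPrefix };
    }
    if (Buffer.byteLength(objectKey, 'utf8') > KEY_MAX_BYTE_LENGTH) {
        return { isValid: false };
    }
    return { isValid: true };
}
```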

View File

@@ -1,80 +0,0 @@
const assert = require('assert');
const http = require('http');
const werelogs = require('werelogs');
const logger = new werelogs.Logger('test:routesUtils.responseStreamData');
const { responseStreamData } = require('../../../../lib/s3routes/routesUtils.js');
const AwsClient = require('../../../../lib/storage/data/external/AwsClient');
const DummyObjectStream = require('../../storage/data/DummyObjectStream');
werelogs.configure({
level: 'debug',
dump: 'error',
});
describe('routesUtils.responseStreamData', () => {
const awsAgent = new http.Agent({
keepAlive: true,
});
const awsConfig = {
s3Params: {
endpoint: 'http://localhost:8888',
maxRetries: 0,
s3ForcePathStyle: true,
accessKeyId: 'accessKey',
secretAccessKey: 'secretKey',
httpOptions: {
agent: awsAgent,
},
},
bucketName: 'awsTestBucketName',
dataStoreName: 'awsDataStore',
serverSideEncryption: false,
type: 'aws',
};
let httpServer;
let awsClient;
beforeAll(done => {
awsClient = new AwsClient(awsConfig);
httpServer = http.createServer((req, res) => {
const objStream = new DummyObjectStream(0, 10000000);
res.setHeader('content-length', 10000000);
objStream.pipe(res);
}).listen(8888);
httpServer.on('listening', done);
httpServer.on('error', err => assert.ifError(err));
});
afterAll(() => {
httpServer.close();
});
it('should not leak socket if client closes the connection before ' +
'data backend starts streaming', done => {
responseStreamData(undefined, {}, {}, [{
key: 'foo',
size: 10000000,
}], {
client: awsClient,
implName: 'impl',
config: {},
locStorageCheckFn: () => {},
}, {
setHeader: () => {},
writeHead: () => {},
on: () => {},
once: () => {},
emit: () => {},
write: () => {},
end: () => setTimeout(() => {
const nOpenSockets = Object.keys(awsAgent.sockets).length;
assert.strictEqual(nOpenSockets, 0);
done();
}, 1000),
// fake a connection close from the S3 client by setting the "isclosed" flag
isclosed: true,
}, undefined, logger.newRequestLogger());
});
});

View File

@@ -28,7 +28,9 @@ describe('test generating versionIds', () => {
// nodejs 10 no longer returns error for non-hex string versionIds
it.skip('should return error decoding non-hex string versionIds', () => {
- assert(VID.hexDecode('foo') instanceof Error);
+ const encoded = vids.map(vid => VID.hexEncode(vid));
+ const decoded = encoded.map(vid => VID.hexDecode(`${vid}foo`));
+ decoded.forEach(result => assert(result instanceof Error));
});
it('should encode and decode versionIds', () => {

View File

@@ -1,538 +0,0 @@
const async = require('async');
const crypto = require('crypto');
const errors = require('../../lib/errors');
const kms = [];
let count = 1;
/** Class exposing common createDataKey,
createDecipher and createCipher functions. */
class Common {
static _algorithm() {
return 'aes-256-ctr';
}
/* AES-256 Key */
static _keySize() {
return 32;
}
/* IV is 128bit for AES-256-CTR */
static _IVSize() {
return 16;
}
/* block size is 128bit for AES-256-CTR */
static _aesBlockSize() {
return 16;
}
/**
Creates data key to encrypt and decrypt the actual data (which data
key is ciphered and deciphered by the appliance). The encrypted data key
is stored in the object's metadata. We also use this function to create
the bucket key for the file and in memory implementations.
@return {buffer} - random key
*/
static createDataKey() {
return Buffer.from(crypto.randomBytes(this._keySize()));
}
/**
*
* @param {buffer} derivedIV - the derived IV to increment
* @param {number} counter - quotient of the offset and blocksize
* @return {buffer} - the incremented IV
*/
static _incrementIV(derivedIV, counter) {
const newIV = derivedIV;
const len = derivedIV.length;
let i = len - 1;
let ctr = counter;
while (ctr !== 0) {
const mod = (ctr + newIV[i]) % 256;
ctr = Math.floor((ctr + newIV[i]) / 256);
newIV[i] = mod;
i -= 1;
if (i < 0) {
i = len - 1;
}
}
return newIV;
}
/**
* Derive key to use in cipher
* @param {number} cryptoScheme - cryptoScheme being used
* @param {buffer} dataKey - the unencrypted key (either from the
* appliance on a get or originally generated by kms in the case of a put)
* @param {object} log - logger object
* @param {function} cb - cb from createDecipher
* @returns {undefined}
* @callback called with (err, derivedKey, derivedIV)
*/
static _deriveKey(cryptoScheme, dataKey, log, cb) {
if (cryptoScheme <= 1) {
/* we are not storing hashed human password.
* It's a random key, so 1 iteration and
* a fixed salt is enough for our usecase.
* don't change the salt, the iteration number
* or the digest algorithm (sha1 here) without
* bumping the cryptoScheme number saved in the object
* metadata along with the dataKey.
*/
const salt = Buffer.from('ItsTasty', 'utf8');
const iterations = 1;
return crypto.pbkdf2(
dataKey, salt, iterations,
this._keySize(), 'sha1', (err, derivedKey) => {
if (err) {
log.error('pbkdf2 function failed on key derivation',
{ error: err });
cb(errors.InternalError);
return;
}
crypto.pbkdf2(
derivedKey, salt, iterations,
this._IVSize(), 'sha1', (err, derivedIV) => {
if (err) {
log.error(
'pbkdf2 function failed on IV derivation',
{ error: err });
return cb(errors.InternalError);
}
// derivedKey is the actual data encryption or
// decryption key used in the AES ctr cipher
return cb(null, derivedKey, derivedIV);
});
});
}
log.error('Unknown cryptographic scheme', { cryptoScheme });
return cb(errors.InternalError);
}
/**
* createDecipher
* @param {number} cryptoScheme - cryptoScheme being used
* @param {buffer} dataKey - the unencrypted key (either from the
* appliance on a get or originally generated by kms in the case of a put)
* @param {number} offset - offset
* @param {object} log - logger object
* @param {function} cb - cb from external call
* @returns {undefined}
* @callback called with (err, decipher: ReadWritable.stream)
*/
static createDecipher(cryptoScheme, dataKey, offset, log, cb) {
this._deriveKey(
cryptoScheme, dataKey, log,
(err, derivedKey, derivedIV) => {
if (err) {
log.debug('key derivation failed', { error: err });
return cb(err);
}
const aesBlockSize = this._aesBlockSize();
const blocks = Math.floor(offset / aesBlockSize);
const toSkip = offset % aesBlockSize;
const iv = this._incrementIV(derivedIV, blocks);
const cipher = crypto.createDecipheriv(this._algorithm(),
derivedKey, iv);
if (toSkip) {
/* Above, we advanced to the latest boundary not
greater than the offset amount. Here we advance by
the toSkip amount if necessary. */
const dummyBuffer = Buffer.alloc(toSkip);
cipher.write(dummyBuffer);
cipher.read();
}
return cb(null, cipher);
});
}
/**
* createCipher (currently same as createDecipher function above. this
* wrapper is included to preserve flexibility)
* @param {number} cryptoScheme - cryptoScheme being used
* @param {buffer} dataKey - the unencrypted key (either from the
* appliance on a get or originally generated by kms in the case of a put)
* @param {number} offset - offset
* @param {object} log - logger object
* @param {function} cb - cb from external call
* @returns {undefined}
* @callback called with (err, cipher: ReadWritable.stream)
*/
static createCipher(cryptoScheme, dataKey, offset, log, cb) {
/* aes-256-ctr decipher is both ways */
this.createDecipher(cryptoScheme, dataKey, offset, log, cb);
}
}
const backend = {
/*
* Target implementation will be async. let's mimic it
*/
/**
*
* @param {string} bucketName - bucket name
* @param {object} log - logger object
* @param {function} cb - callback
* @returns {undefined}
* @callback called with (err, masterKeyId: string)
*/
createBucketKey: function createBucketKeyMem(bucketName, log, cb) {
process.nextTick(() => {
// Using createDataKey here for purposes of createBucketKeyMem
// so that we do not need a separate function.
kms[count] = Common.createDataKey();
cb(null, (count++).toString());
});
},
/**
*
* @param {string} bucketKeyId - the Id of the bucket key
* @param {object} log - logger object
* @param {function} cb - callback
* @returns {undefined}
* @callback called with (err)
*/
destroyBucketKey: function destroyBucketKeyMem(bucketKeyId, log, cb) {
process.nextTick(() => {
kms[bucketKeyId] = undefined;
cb(null);
});
},
/**
*
* @param {number} cryptoScheme - crypto scheme version number
* @param {string} masterKeyId - key to retrieve master key
* @param {buffer} plainTextDataKey - data key
* @param {object} log - logger object
* @param {function} cb - callback
* @returns {undefined}
* @callback called with (err, cipheredDataKey: Buffer)
*/
cipherDataKey: function cipherDataKeyMem(cryptoScheme,
masterKeyId,
plainTextDataKey,
log,
cb) {
process.nextTick(() => {
Common.createCipher(
cryptoScheme, kms[masterKeyId], 0, log,
(err, cipher) => {
if (err) {
cb(err);
return;
}
let cipheredDataKey =
cipher.update(plainTextDataKey);
// call final() to ensure that any bytes remaining in
// the output of the stream are captured
const final = cipher.final();
if (final.length !== 0) {
cipheredDataKey =
Buffer.concat([cipheredDataKey,
final]);
}
cb(null, cipheredDataKey);
});
});
},
/**
*
* @param {number} cryptoScheme - crypto scheme version number
* @param {string} masterKeyId - key to retrieve master key
* @param {buffer} cipheredDataKey - data key
* @param {object} log - logger object
* @param {function} cb - callback
* @returns {undefined}
* @callback called with (err, plainTextDataKey: Buffer)
*/
decipherDataKey: function decipherDataKeyMem(cryptoScheme,
masterKeyId,
cipheredDataKey,
log,
cb) {
process.nextTick(() => {
Common.createDecipher(
cryptoScheme, kms[masterKeyId], 0, log,
(err, decipher) => {
if (err) {
cb(err);
return;
}
let plainTextDataKey =
decipher.update(cipheredDataKey);
const final = decipher.final();
if (final.length !== 0) {
plainTextDataKey =
Buffer.concat([plainTextDataKey,
final]);
}
cb(null, plainTextDataKey);
});
});
},
};
const client = backend;
const implName = 'mem';
class KMS {
/**
*
* @param {string} bucketName - bucket name
* @param {object} log - logger object
* @param {function} cb - callback
* @returns {undefined}
* @callback called with (err, masterKeyId: string)
*/
static createBucketKey(bucketName, log, cb) {
log.debug('creating a new bucket key');
client.createBucketKey(bucketName, log, (err, masterKeyId) => {
if (err) {
log.debug('error from kms', { implName, error: err });
return cb(err);
}
log.trace('bucket key created in kms');
return cb(null, masterKeyId);
});
}
/**
*
* @param {string} bucketName - bucket name
* @param {object} headers - request headers
* @param {object} log - logger object
* @param {function} cb - callback
* @returns {undefined}
* @callback called with (err, serverSideEncryptionInfo: object)
*/
static bucketLevelEncryption(bucketName, headers, log, cb) {
const sseAlgorithm = headers['x-amz-scal-server-side-encryption'];
const sseMasterKeyId =
headers['x-amz-scal-server-side-encryption-aws-kms-key-id'];
/*
The purpose of bucket level encryption is so that the client does not
have to send appropriate headers to trigger encryption on each object
put in an "encrypted bucket". Customer provided keys are not
feasible in this system because we do not want to store this key
in the bucket metadata.
*/
if (sseAlgorithm === 'AES256' ||
(sseAlgorithm === 'aws:kms' && sseMasterKeyId === undefined)) {
this.createBucketKey(bucketName, log, (err, masterKeyId) => {
if (err) {
cb(err);
return;
}
const serverSideEncryptionInfo = {
cryptoScheme: 1,
algorithm: sseAlgorithm,
masterKeyId,
mandatory: true,
};
cb(null, serverSideEncryptionInfo);
});
} else if (sseAlgorithm === 'aws:kms') {
const serverSideEncryptionInfo = {
cryptoScheme: 1,
algorithm: sseAlgorithm,
masterKeyId: sseMasterKeyId,
mandatory: true,
};
cb(null, serverSideEncryptionInfo);
} else {
/*
* no encryption
*/
cb(null, null);
}
}
/**
*
* @param {string} bucketKeyId - the Id of the bucket key
* @param {object} log - logger object
* @param {function} cb - callback
* @returns {undefined}
* @callback called with (err)
*/
static destroyBucketKey(bucketKeyId, log, cb) {
log.debug('deleting bucket key', { bucketKeyId });
client.destroyBucketKey(bucketKeyId, log, err => {
if (err) {
log.debug('error from kms', { implName, error: err });
return cb(err);
}
log.trace('bucket key destroyed in kms');
return cb(null);
});
}
/**
*
* @param {object} log - logger object
* @returns {buffer} newKey - a data key
*/
static createDataKey(log) {
log.debug('creating a new data key');
const newKey = Common.createDataKey();
log.trace('data key created by the kms');
return newKey;
}
/**
* createCipherBundle
* @param {object} serverSideEncryptionInfo - info for encryption
* @param {number} serverSideEncryptionInfo.cryptoScheme -
* cryptoScheme used
* @param {string} serverSideEncryptionInfo.algorithm -
* algorithm to use
* @param {string} serverSideEncryptionInfo.masterKeyId -
* key to get master key
* @param {boolean} serverSideEncryptionInfo.mandatory -
* true for mandatory encryption
* @param {object} log - logger object
* @param {function} cb - cb from external call
* @returns {undefined}
* @callback called with (err, cipherBundle)
*/
static createCipherBundle(serverSideEncryptionInfo,
log, cb) {
const dataKey = this.createDataKey(log);
const cipherBundle = {
algorithm: serverSideEncryptionInfo.algorithm,
masterKeyId: serverSideEncryptionInfo.masterKeyId,
cryptoScheme: 1,
cipheredDataKey: null,
cipher: null,
};
async.waterfall([
function cipherDataKey(next) {
log.debug('ciphering a data key');
return client.cipherDataKey(cipherBundle.cryptoScheme,
serverSideEncryptionInfo.masterKeyId,
dataKey, log, (err, cipheredDataKey) => {
if (err) {
log.debug('error from kms',
{ implName, error: err });
return next(err);
}
log.trace('data key ciphered by the kms');
return next(null, cipheredDataKey);
});
},
function createCipher(cipheredDataKey, next) {
log.debug('creating a cipher');
cipherBundle.cipheredDataKey =
cipheredDataKey.toString('base64');
return Common.createCipher(cipherBundle.cryptoScheme,
dataKey, 0, log, (err, cipher) => {
dataKey.fill(0);
if (err) {
log.debug('error from kms',
{ implName, error: err });
return next(err);
}
log.trace('cipher created by the kms');
return next(null, cipher);
});
},
function finishCipherBundle(cipher, next) {
cipherBundle.cipher = cipher;
return next(null, cipherBundle);
},
], (err, cipherBundle) => {
if (err) {
log.error('error processing cipher bundle',
{ implName, error: err });
}
return cb(err, cipherBundle);
});
}
/**
* createDecipherBundle
* @param {object} serverSideEncryptionInfo - info for decryption
* @param {number} serverSideEncryptionInfo.cryptoScheme -
* cryptoScheme used
* @param {string} serverSideEncryptionInfo.algorithm -
* algorithm to use
* @param {string} serverSideEncryptionInfo.masterKeyId -
* key to get master key
* @param {boolean} serverSideEncryptionInfo.mandatory -
* true for mandatory encryption
* @param {buffer} serverSideEncryptionInfo.cipheredDataKey -
* ciphered data key
* @param {number} offset - offset for decryption
* @param {object} log - logger object
* @param {function} cb - cb from external call
* @returns {undefined}
* @callback called with (err, decipherBundle)
*/
static createDecipherBundle(serverSideEncryptionInfo, offset,
log, cb) {
if (!serverSideEncryptionInfo.masterKeyId ||
!serverSideEncryptionInfo.cipheredDataKey ||
!serverSideEncryptionInfo.cryptoScheme) {
log.error('Invalid cryptographic information', { implName });
return cb(errors.InternalError);
}
const decipherBundle = {
cryptoScheme: serverSideEncryptionInfo.cryptoScheme,
decipher: null,
};
return async.waterfall([
function decipherDataKey(next) {
return client.decipherDataKey(
decipherBundle.cryptoScheme,
serverSideEncryptionInfo.masterKeyId,
serverSideEncryptionInfo.cipheredDataKey,
log, (err, plainTextDataKey) => {
log.debug('deciphering a data key');
if (err) {
log.debug('error from kms',
{ implName, error: err });
return next(err);
}
log.trace('data key deciphered by the kms');
return next(null, plainTextDataKey);
});
},
function createDecipher(plainTextDataKey, next) {
log.debug('creating a decipher');
return Common.createDecipher(decipherBundle.cryptoScheme,
plainTextDataKey, offset, log, (err, decipher) => {
plainTextDataKey.fill(0);
if (err) {
log.debug('error from kms',
{ implName, error: err });
return next(err);
}
log.trace('decipher created by the kms');
return next(null, decipher);
});
},
function finishDecipherBundle(decipher, next) {
decipherBundle.decipher = decipher;
return next(null, decipherBundle);
},
], (err, decipherBundle) => {
if (err) {
log.error('error processing decipher bundle',
{ implName, error: err });
return cb(err);
}
return cb(err, decipherBundle);
});
}
}
module.exports = KMS;
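
Putting the pieces together, an encrypt/decrypt round trip resolves the bucket-level SSE headers into `serverSideEncryptionInfo`, builds a cipher bundle for the put, persists the ciphered data key, then rebuilds a decipher bundle for the get. A sketch with a stub logger; the require path is hypothetical and error handling is elided:

```javascript
const KMS = require('./inMemoryKMS'); // hypothetical path to the module above

// Stub logger satisfying the debug/trace/error calls the module makes.
const log = { debug: () => {}, trace: () => {}, error: () => {} };

const headers = { 'x-amz-scal-server-side-encryption': 'AES256' };
KMS.bucketLevelEncryption('mybucket', headers, log, (err, sseInfo) => {
    KMS.createCipherBundle(sseInfo, log, (err, cipherBundle) => {
        const ciphered =
            cipherBundle.cipher.update(Buffer.from('some object data'));
        // The ciphered data key travels with the object metadata; feed it
        // back (as a Buffer) to build the decipher bundle on the read path.
        sseInfo.cipheredDataKey =
            Buffer.from(cipherBundle.cipheredDataKey, 'base64');
        KMS.createDecipherBundle(sseInfo, 0, log, (err, decipherBundle) => {
            const plain = decipherBundle.decipher.update(ciphered);
            // plain.toString() === 'some object data'
        });
    });
});
```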

View File

@@ -177,7 +177,7 @@ class LoopbackServerChannel extends EchoChannel {
serverExtensions.map(extension =>
this.KMIP.TextString(
extension.name,
- extension.value),
+ extension.value)
)));
}
if (queryFunctions.includes('Query Extension Map')) {

View File

@@ -50,6 +50,7 @@ class EchoChannel extends EventEmitter {
this.clogged = true;
return this;
}
+ }
class MirrorChannel extends EchoChannel {

View File

@@ -1,185 +0,0 @@
'use strict'; // eslint-disable-line strict
class DummyProxyResponse {
/**
* Create a new instance of this dummy class
*
* This dummy class implements the minimum feature set
* of the class http.OutgoingMessage suitable for the
* arsenal.storage.metadata.proxy.BucketdRoutes test
* without using an actuall http server.
*
* @param {function} doneCB - function called once the response is
* ready to be consummed. (err, response, body)
*/
constructor(doneCB) {
this.headers = {};
this.body = null;
this.endCalled = false;
this.responseHead = null;
this.doneCB = doneCB;
}
writeHead(statusCode, statusMessage, header) {
this.responseHead = {
statusCode,
statusMessage,
header,
};
}
write(data) {
this.body = data;
}
end(cb) {
if (this.endCalled) {
return;
}
this.endCalled = true;
process.nextTick(() => {
cb(null);
this.doneCB(null, this, JSON.parse(this.body));
});
}
}
class DummyProxyRequest {
/**
* Create a new instance of this dummy class
*
* This dummy class implements the minimum feature set
* of the class http.IncomingMessage suitable for the
* arsenal.storage.metadata.proxy.BucketdRoutes test
* without using an actual http server.
*
* @param {object} params - parameter set describing the intended request
* @param {string} params.method - http method to fake
* @param {string} params.url - url to fake
* @param {string} params.body - request body to fake
* @param {boolean} params.json - if set, the body is assumed to be a
* JSON value and is serialized
* @param {object} params.headers - request headers to fake
*/
constructor(params) {
this.method = params.method;
this.url = params.url;
this.json = params.json;
this.body = Buffer.from(
this.json ? JSON.stringify(params.body) : (params.body || ''));
this.headers = params.headers;
this.socket = {
remoteAddress: '127.0.0.1',
remotePort: 32769,
};
        this.dataConsumed = false;
this.endCB = null;
}
/**
* bind a callback to a particular event on the request processing
*
* @param {string} event - one of 'data', 'end' or 'error'
* @param {function} callback - a function suitable for the associated event
* @returns {object} this
*/
on(event, callback) {
switch (event) {
case 'data':
process.nextTick(() => {
callback(this.body);
                this.dataConsumed = true;
if (this.endCB) {
this.endCB();
}
});
break;
case 'end':
            if (!this.dataConsumed) {
this.endCB = callback;
} else {
process.nextTick(() => {
callback();
});
}
break;
case 'error':
            // never emitted by this mock class
break;
default:
process.nextTick(() => callback(new Error(
`Unsupported DummyProxyRequest.on event '${event}'`)));
}
return this;
}
}
class RequestDispatcher {
/**
* Construct a new RequestDispatcher object.
*
* This class connects the provided Routes class to a dummy interface
* that enables tests to perform requests without using an actual http
* server.
*
* @param {object} routes - an instance of a Routes dispatcher class
*/
constructor(routes) {
this.routes = routes;
}
/**
* fake a POST request on the associated Routes dispatcher
*
* @param {string} path - the path of the object to be posted
* @param {object} objectMD - the metadata to post for this object
* @param {function} callback - called once the request has been processed
* with these parameters (err)
* @returns {undefined}
*/
post(path, objectMD, callback) {
this.routes.dispatch(new DummyProxyRequest({
method: 'POST',
url: path,
json: true,
body: objectMD,
headers: {},
}), new DummyProxyResponse(callback));
}
/**
* fake a GET request on the associated Routes dispatcher
*
* @param {string} path - the path of the object to be retrieved
* @param {function} callback - called once the request has been processed
* with these parameters (err, response, body)
* @returns {undefined}
*/
get(path, callback) {
this.routes.dispatch(new DummyProxyRequest({
method: 'GET',
url: path,
json: true,
body: '',
headers: {},
}), new DummyProxyResponse(callback));
}
/**
* fake a DELETE request on the associated Routes dispatcher
*
* @param {string} path - the path of the object to be deleted
* @param {function} callback - called once the request has been processed
* with these parameters (err)
* @returns {undefined}
*/
delete(path, callback) {
this.routes.dispatch(new DummyProxyRequest({
method: 'DELETE',
url: path,
json: true,
body: '',
headers: {},
}), new DummyProxyResponse(callback));
}
}
module.exports = { RequestDispatcher };
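For reference, a sketch of how a test might drive these helpers. The require path, the object path layout and the expected payload are assumptions; the real tests wire in an actual BucketdRoutes instance:

```javascript
// Hypothetical test flow exercising the routes without an http server.
const assert = require('assert');
const { RequestDispatcher } = require('./RequestDispatcher');

// `routes` is assumed to be a configured BucketdRoutes instance,
// e.g. from arsenal.storage.metadata.proxy; its setup is elided here.
function exerciseRoutes(routes, done) {
    const dispatcher = new RequestDispatcher(routes);
    const objectPath = '/default/bucket/mybucket/myobject';
    const objectMD = { 'content-length': 0 };

    dispatcher.post(objectPath, objectMD, err => {
        assert.ifError(err);
        dispatcher.get(objectPath, (err, response, body) => {
            assert.ifError(err);
            // body is the JSON-parsed payload written by the routes
            assert.deepStrictEqual(body, objectMD);
            dispatcher.delete(objectPath, done);
        });
    });
}
```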

Some files were not shown because too many files have changed in this diff.