Compare commits


27 Commits

Author          SHA1        Message                                               Date
Anurag Mittal   d90bc4f741  updated data message                                  2024-07-04 00:51:11 +02:00
Anurag Mittal   b9dd7139ad  updated events                                        2024-07-03 14:14:08 +02:00
Anurag Mittal   6543d9f88d  updated events for active span                        2024-07-01 14:22:51 +02:00
Anurag Mittal   44efcd625c  updated authv4                                        2024-07-01 14:18:20 +02:00
Anurag Mittal   c6bb489ade  updated signed headers request                        2024-07-01 14:06:20 +02:00
Anurag Mittal   76c4c2b2bb  added details about headers                           2024-07-01 13:51:44 +02:00
Anurag Mittal   d15d6f8a06  updated authv4 spans                                  2024-07-01 13:37:06 +02:00
Anurag Mittal   8d40bab08f  updated more auth details                             2024-07-01 00:58:41 +02:00
Anurag Mittal   24f6d8374e  added more details to authv4                          2024-07-01 00:44:00 +02:00
Anurag Mittal   bb3b448757  added auth spans                                      2024-07-01 00:31:12 +02:00
Anurag Mittal   892dee6c13  fixup                                                 2024-06-28 18:06:48 +02:00
Anurag Mittal   6876861b5d  expanded auth                                         2024-06-28 17:58:35 +02:00
Anurag Mittal   ff66b13a1a  updated vault method                                  2024-06-28 17:06:29 +02:00
Anurag Mittal   a1ac267b48  fixup                                                 2024-06-28 05:49:52 +02:00
Anurag Mittal   a12716ffe3  updated s3 span names                                 2024-06-27 19:55:49 +02:00
Anurag Mittal   05173de018  updated getting data spans                            2024-06-26 00:35:22 +02:00
Anurag Mittal   b75d73fe40  updated parent span name                              2024-06-25 22:57:30 +02:00
Anurag Mittal   052113c0ff  deleteroute fixup for rb bucket                       2024-06-19 01:08:09 +02:00
Anurag Mittal   5af62b174d  removed extra, set attributes                         2024-06-19 00:00:38 +02:00
Anurag Mittal   4e158a25b6  routes.js attribute fixup                             2024-06-18 23:59:17 +02:00
Anurag Mittal   8fd1c42d8d  reverted debug statements                             2024-06-18 17:17:49 +02:00
Anurag Mittal   f77da8a8a2  added Metadata list objects instrumentation           2024-06-18 17:09:10 +02:00
Anurag Mittal   a291fbc10b  fixup                                                 2024-06-18 16:49:49 +02:00
Anurag Mittal   4a76a9c5f6  S3C-8896-add-instrumentation-for-list-object-md-api   2024-06-18 16:45:15 +02:00
Anurag Mittal   068570bc26  S3C-8896: added events in doAuth                      2024-06-18 14:17:42 +02:00
Anurag Mittal   b7122681c2  S3C-8893: instrumented all routes                     2024-06-18 09:47:52 +02:00
Anurag Mittal   3f7eb4c31d  S3C-8896-add-tracing-for-get-route                    2024-06-17 20:34:54 +02:00
161 changed files with 9789 additions and 18070 deletions

View File

@@ -1,6 +1 @@
-{
-    "extends": "scality",
-    "parserOptions": {
-        "ecmaVersion": 2020
-    }
-}
+{ "extends": "scality" }

View File

@@ -25,30 +25,24 @@ jobs:
           - 6379:6379
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+        uses: actions/checkout@v2
+      - uses: actions/setup-node@v2
        with:
          node-version: '16'
          cache: 'yarn'
      - name: install dependencies
-        run: yarn install --frozen-lockfile --prefer-offline --network-concurrency 1
+        run: yarn install --frozen-lockfile --prefer-offline
        continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
      - name: lint yaml
        run: yarn --silent lint_yml
      - name: lint javascript
-        run: yarn --silent lint --max-warnings 0
+        run: yarn --silent lint -- --max-warnings 0
      - name: lint markdown
        run: yarn --silent lint_md
-      - name: add hostname
-        run: |
-          sudo sh -c "echo '127.0.0.1 testrequestbucket.localhost' >> /etc/hosts"
-      - name: test and coverage
-        run: yarn --silent coverage
+      - name: run unit tests
+        run: yarn test
      - name: run functional tests
        run: yarn ft_test
-      - uses: codecov/codecov-action@v4
-        with:
-          token: ${{ secrets.CODECOV_TOKEN }}
      - name: run executables tests
        run: yarn install && yarn test
        working-directory: 'lib/executables/pensieveCreds/'

.swcrc
View File

@@ -1,12 +0,0 @@
{
"$schema": "https://swc.rs/schema.json",
"jsc": {
"parser": {
"syntax": "typescript"
},
"target": "es2017"
},
"module": {
"type": "commonjs"
}
}

View File

@@ -1,7 +1,5 @@
 # Arsenal

-[![codecov](https://codecov.io/gh/scality/Arsenal/branch/development/8.1/graph/badge.svg?token=X0esXhJSwb)](https://codecov.io/gh/scality/Arsenal)
-
 Common utilities for the S3 project components

 Within this repository, you will be able to find the shared libraries for the

View File

@@ -85,66 +85,6 @@ Used to store the bucket lifecycle configuration info

 ### Properties Added

-```javascript
-this._uid = uid || uuid();
-```
-
-### Usage
-
-Used to set a unique identifier on a bucket
-
-## Model version 8
-
-### Properties Added
-
-```javascript
-this._readLocationConstraint = readLocationConstraint || null;
-```
-
-### Usage
-
-Used to store default read location of the bucket
-
-## Model version 9
-
-### Properties Added
-
-```javascript
-this._isNFS = isNFS || null;
-```
-
-### Usage
-
-Used to determine whether the bucket may be accessed through NFS
-
-## Model version 10
-
-### Properties Added
-
-```javascript
-this._ingestion = ingestionConfig || null;
-```
-
-### Usage
-
-Used to store the ingestion status of a bucket
-
-## Model version 11
-
-### Properties Added
-
-```javascript
-this._azureInfo = azureInfo || null;
-```
-
-### Usage
-
-Used to store Azure storage account specific information
-
-## Model version 12
-
-### Properties Added
-
 ```javascript
 this._objectLockEnabled = objectLockEnabled || false;
 this._objectLockConfiguration = objectLockConfiguration || null;
@@ -155,7 +95,7 @@ this._objectLockConfiguration = objectLockConfiguration || null;
 Used to determine whether object lock capabilities are enabled on a bucket and
 to store the object lock configuration of the bucket

-## Model version 13
+## Model version 8

 ### Properties Added
@@ -167,7 +107,7 @@ this._notificationConfiguration = notificationConfiguration || null;

 Used to store the bucket notification configuration info

-## Model version 14
+## Model version 9

 ### Properties Added
@@ -179,7 +119,19 @@ this._serverSideEncryption.configuredMasterKeyId = configuredMasterKeyId || unde

 Used to store the users configured KMS key id

-## Model version 15
+## Model version 10
+
+### Properties Added
+
+```javascript
+this._uid = uid || uuid();
+```
+
+### Usage
+
+Used to set a unique identifier on a bucket
+
+## Model version 11

 ### Properties Added
@@ -187,74 +139,6 @@ Used to store the users configured KMS key id
 this._tags = tags || null;
 ```

-The Tag Set of a bucket is an array of objects with Key and Value:
-
-```javascript
-[
-    {
-        Key: 'something',
-        Value: 'some_data'
-    }
-]
-```
-
-## Model version 16
-
-### Properties Added
-
-```javascript
-this._capabilities = capabilities || undefined;
-```
-
-For capacity-enabled buckets, contains the following data:
-
-```javascript
-{
-    _capabilities: {
-        VeeamSOSApi?: {
-            SystemInfo?: {
-                ProtocolVersion: String,
-                ModelName: String,
-                ProtocolCapabilities: {
-                    CapacityInfo: Boolean,
-                    UploadSessions: Boolean,
-                    IAMSTS: Boolean,
-                },
-                APIEndpoints: {
-                    IAMEndpoint: String,
-                    STSEndpoint: String,
-                },
-                SystemRecommendations?: {
-                    S3ConcurrentTaskLimit: Number,
-                    S3MultiObjectDelete: Number,
-                    StorageCurrentTasksLimit: Number,
-                    KbBlockSize: Number,
-                }
-                LastModified?: String,
-            },
-            CapacityInfo?: {
-                Capacity: Number,
-                Available: Number,
-                Used: Number,
-                LastModified?: String,
-            },
-        }
-    },
-}
-```
-
 ### Usage

 Used to store bucket tagging
-
-## Model version 17
-
-### Properties Added
-
-```javascript
-this._quotaMax = quotaMax || 0;
-```
-
-### Usage
-
-Used to store bucket quota
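For readers scanning this renumbering: the Tag Set shape that this document stores on `this._tags` can be pinned down with a short sketch. The `Tag` type below is illustrative, not an Arsenal export:

```typescript
// Illustrative sketch of the Tag Set stored on a bucket model,
// per the documentation above: an array of { Key, Value } objects,
// kept as null when the bucket has no tags.
type Tag = { Key: string; Value: string };

const tagSet: Tag[] = [
    { Key: 'something', Value: 'some_data' },
];

// Mirrors `this._tags = tags || null;` from the model code.
const _tags: Tag[] | null = tagSet.length > 0 ? tagSet : null;
console.log(_tags);
```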

View File

@@ -1,28 +0,0 @@
{
"groups": {
"default": {
"packages": [
"lib/executables/pensieveCreds/package.json",
"package.json"
]
}
},
"branchPrefix": "improvement/greenkeeper.io/",
"commitMessages": {
"initialBadge": "docs(readme): add Greenkeeper badge",
"initialDependencies": "chore(package): update dependencies",
"initialBranches": "chore(bert-e): whitelist greenkeeper branches",
"dependencyUpdate": "fix(package): update ${dependency} to version ${version}",
"devDependencyUpdate": "chore(package): update ${dependency} to version ${version}",
"dependencyPin": "fix: pin ${dependency} to ${oldVersionResolved}",
"devDependencyPin": "chore: pin ${dependency} to ${oldVersionResolved}",
"closes": "\n\nCloses #${number}"
},
"ignore": [
"ajv",
"eslint",
"eslint-plugin-react",
"eslint-config-airbnb",
"eslint-config-scality"
]
}

View File

@@ -1,19 +1,14 @@
 import * as evaluators from './lib/policyEvaluator/evaluator';
 import evaluatePrincipal from './lib/policyEvaluator/principal';
-import RequestContext, {
-    actionNeedQuotaCheck,
-    actionNeedQuotaCheckCopy,
-    actionWithDataDeletion } from './lib/policyEvaluator/RequestContext';
+import RequestContext from './lib/policyEvaluator/RequestContext';
 import * as requestUtils from './lib/policyEvaluator/requestUtils';
 import * as actionMaps from './lib/policyEvaluator/utils/actionMaps';
 import { validateUserPolicy } from './lib/policy/policyValidator'
-import * as locationConstraints from './lib/patches/locationConstraints';
 import * as userMetadata from './lib/s3middleware/userMetadata';
 import convertToXml from './lib/s3middleware/convertToXml';
 import escapeForXml from './lib/s3middleware/escapeForXml';
 import * as objectLegalHold from './lib/s3middleware/objectLegalHold';
 import * as tagging from './lib/s3middleware/tagging';
-import { checkDateModifiedHeaders } from './lib/s3middleware/validateConditionalHeaders';
 import { validateConditionalHeaders } from './lib/s3middleware/validateConditionalHeaders';
 import MD5Sum from './lib/s3middleware/MD5Sum';
 import NullStream from './lib/s3middleware/nullStream';
@@ -21,10 +16,8 @@ import * as objectUtils from './lib/s3middleware/objectUtils';
 import * as mpuUtils from './lib/s3middleware/azureHelpers/mpuUtils';
 import ResultsCollector from './lib/s3middleware/azureHelpers/ResultsCollector';
 import SubStreamInterface from './lib/s3middleware/azureHelpers/SubStreamInterface';
-import { prepareStream } from './lib/s3middleware/prepareStream';
 import * as processMpuParts from './lib/s3middleware/processMpuParts';
 import * as retention from './lib/s3middleware/objectRetention';
-import * as objectRestore from './lib/s3middleware/objectRestore';
 import * as lifecycleHelpers from './lib/s3middleware/lifecycleHelpers';
 export { default as errors } from './lib/errors';
 export { default as Clustering } from './lib/Clustering';
@@ -41,15 +34,22 @@ export * as stream from './lib/stream';
 export * as jsutil from './lib/jsutil';
 export { default as stringHash } from './lib/stringHash';
 export * as db from './lib/db';
-export * as errorUtils from './lib/errorUtils';
 export { default as shuffle } from './lib/shuffle';
 export * as models from './lib/models';

 export const algorithms = {
-    list: require('./lib/algos/list/exportAlgos'),
+    list: {
+        Basic: require('./lib/algos/list/basic').List,
+        Delimiter: require('./lib/algos/list/delimiter').Delimiter,
+        DelimiterVersions: require('./lib/algos/list/delimiterVersions').DelimiterVersions,
+        DelimiterMaster: require('./lib/algos/list/delimiterMaster').DelimiterMaster,
+        MPU: require('./lib/algos/list/MPU').MultipartUploads,
+        DelimiterCurrent: require('./lib/algos/list/delimiterCurrent').DelimiterCurrent,
+        DelimiterNonCurrent: require('./lib/algos/list/delimiterNonCurrent').DelimiterNonCurrent,
+        DelimiterOrphanDeleteMarker: require('./lib/algos/list/delimiterOrphanDeleteMarker').DelimiterOrphanDeleteMarker,
+    },
     listTools: {
         DelimiterTools: require('./lib/algos/list/tools'),
-        Skip: require('./lib/algos/list/skip'),
     },
     cache: {
         GapSet: require('./lib/algos/cache/GapSet'),
@@ -70,9 +70,6 @@ export const policies = {
     RequestContext,
     requestUtils,
     actionMaps,
-    actionNeedQuotaCheck,
-    actionWithDataDeletion,
-    actionNeedQuotaCheckCopy,
 };

 export const testing = {
@@ -85,7 +82,6 @@ export const s3middleware = {
     escapeForXml,
     objectLegalHold,
     tagging,
-    checkDateModifiedHeaders,
     validateConditionalHeaders,
     MD5Sum,
     NullStream,
@@ -95,10 +91,8 @@ export const s3middleware = {
         ResultsCollector,
         SubStreamInterface,
     },
-    prepareStream,
     processMpuParts,
     retention,
-    objectRestore,
     lifecycleHelpers,
 };
@@ -169,7 +163,3 @@ export const storage = {
 export const pensieve = {
     credentialUtils: require('./lib/executables/pensieveCreds/utils'),
 };
-
-export const patches = {
-    locationConstraints,
-};
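Both shapes of the `algorithms.list` export are consumed the same way by callers. A minimal usage sketch follows; the constructor arguments shown are illustrative, so check the class for the exact signature:

```typescript
// Sketch: consuming the listing algorithms re-exported above.
import { algorithms } from 'arsenal';
import werelogs from 'werelogs';

const logger = new werelogs.Logger('ListingExample');
const { DelimiterMaster } = algorithms.list;

// Parameters mirror S3 ListObjects options; 'v0' selects the
// metadata key format, as in the delimiterMaster diff below.
const listing = new DelimiterMaster({ delimiter: '/', prefix: 'photos/' }, logger, 'v0');
console.log(typeof listing.filter); // filter(entry) consumes listing entries
```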

View File

@@ -196,9 +196,6 @@ export class Delimiter extends Extension {
     }

     getCommonPrefix(key: string): string | undefined {
-        if (!this.delimiter) {
-            return undefined;
-        }
         const baseIndex = this.prefix ? this.prefix.length : 0;
         const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
         if (delimiterIndex === -1) {

View File

@@ -183,13 +183,6 @@ export class DelimiterMaster extends Delimiter {
                     id: DelimiterFilterStateId.NotSkipping,
                 };
             }
-        } else {
-            // save base implementation of the `NotSkipping` state in
-            // Delimiter before overriding it with ours, to be able to call it from there
-            this.keyHandler_NotSkipping_Delimiter = this.keyHandlers[DelimiterFilterStateId.NotSkipping];
-            this.setKeyHandler(
-                DelimiterFilterStateId.NotSkipping,
-                this.keyHandler_NotSkippingPrefixNorVersionsV1.bind(this));
         }
         // in v1, we can directly use Delimiter's implementation,
         // which is already set to the proper state
@@ -423,20 +416,6 @@ export class DelimiterMaster extends Delimiter {
         return this.filter_onNewMasterKeyV0(key, value);
     }

-    filter_onNewMasterKeyV1(key: string, value: string): FilterReturnValue {
-        // if this master key is a delete marker, accept it without
-        // adding the version to the contents
-        if (Version.isDeleteMarker(value)) {
-            return FILTER_ACCEPT;
-        }
-        // use base Delimiter's implementation
-        return this.keyHandler_NotSkipping_Delimiter(key, value);
-    }
-
-    keyHandler_NotSkippingPrefixNorVersionsV1(key: string, value: string): FilterReturnValue {
-        return this.filter_onNewMasterKeyV1(key, value);
-    }
-
     keyHandler_SkippingVersionsV0(key: string, value: string): FilterReturnValue {
         /* In the SkippingVersionsV0 state, skip all version keys
          * (<key><versionIdSeparator><version>) */
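The V1 handler deleted above short-circuits delete markers before falling back to the base `Delimiter` handler. Reduced to a standalone sketch, with `isDeleteMarker` assumed to read a flag out of the serialized metadata:

```typescript
// Reduced sketch of the deleted filter_onNewMasterKeyV1 logic.
const FILTER_ACCEPT = 1;

// Assumption: the delete-marker flag lives in the JSON-encoded value.
function isDeleteMarker(value: string): boolean {
    try {
        return JSON.parse(value).isDeleteMarker === true;
    } catch {
        return false;
    }
}

function filterOnNewMasterKeyV1(
    key: string,
    value: string,
    baseHandler: (key: string, value: string) => number,
): number {
    if (isDeleteMarker(value)) {
        // Accept the key but do not add the version to the contents.
        return FILTER_ACCEPT;
    }
    // Otherwise defer to base Delimiter's NotSkipping handler.
    return baseHandler(key, value);
}
```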

View File

@@ -396,11 +396,6 @@ export class DelimiterVersions extends Extension {
     }

     keyHandler_NotSkippingV1(key: string, versionId: string | undefined, value: string): FilterReturnValue {
-        // NOTE: this check on PHD is only useful for Artesca, S3C
-        // does not use PHDs in V1 format
-        if (Version.isPHD(value)) {
-            return FILTER_ACCEPT;
-        }
         return this.filter_onNewKey(key, versionId, value);
     }

View File

@@ -14,7 +14,7 @@ function vaultSignatureCb(
     err: Error | null,
     authInfo: { message: { body: any } },
     log: Logger,
-    callback: (err: Error | null, data?: any, results?: any, params?: any, infos?: any) => void,
+    callback: (err: Error | null, data?: any, results?: any, params?: any) => void,
     streamingV4Params?: any
 ) {
     // vaultclient API guarantees that it returns:
@@ -38,9 +38,7 @@
     }
     // @ts-ignore
     log.addDefaultFields(auditLog);
-    return callback(null, userInfo, authorizationResults, streamingV4Params, {
-        accountQuota: info.accountQuota || {},
-    });
+    return callback(null, userInfo, authorizationResults, streamingV4Params);
 }

 export type AuthV4RequestParams = {
@@ -386,19 +384,4 @@ export default class Vault {
             return callback(null, respBody);
         });
     }
-
-    report(log: Logger, callback: (err: Error | null, data?: any) => void) {
-        // call the report function of the client
-        if (!this.client.report) {
-            return callback(null, {});
-        }
-        // @ts-ignore
-        return this.client.report(log.getSerializedUids(), (err: Error | null, obj?: any) => {
-            if (err) {
-                log.debug(`error from ${this.implName}`, { error: err });
-                return callback(err);
-            }
-            return callback(null, obj);
-        });
-    }
 }
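The only behavioural difference in `vaultSignatureCb` is the fifth callback argument carrying account quota information on the development side. A sketch of the two shapes, with types condensed from the diff:

```typescript
// Sketch: the four- vs five-argument callback shapes in this diff.
type AuthCallback = (
    err: Error | null,
    userInfo?: any,
    authorizationResults?: any,
    streamingV4Params?: any,
    infos?: { accountQuota: Record<string, unknown> }, // five-argument side only
) => void;

function emit(cb: AuthCallback, accountQuota?: Record<string, unknown>) {
    if (accountQuota !== undefined) {
        // development/8.1 side: quota travels with the auth result.
        return cb(null, {}, [], {}, { accountQuota });
    }
    // head-branch side: no quota payload.
    return cb(null, {}, [], {});
}
```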

View File

@@ -9,12 +9,10 @@ import * as constants from '../constants';
 import constructStringToSignV2 from './v2/constructStringToSign';
 import constructStringToSignV4 from './v4/constructStringToSign';
 import { convertUTCtoISO8601 } from './v4/timeUtils';
-import * as vaultUtilities from './backends/in_memory/vaultUtilities';
-import * as inMemoryBackend from './backends/in_memory/Backend';
-import baseBackend from './backends/base';
-import chainBackend from './backends/ChainBackend';
-import validateAuthConfig from './backends/in_memory/validateAuthConfig';
-import AuthLoader from './backends/in_memory/AuthLoader';
+import * as vaultUtilities from './in_memory/vaultUtilities';
+import * as backend from './in_memory/Backend';
+import validateAuthConfig from './in_memory/validateAuthConfig';
+import AuthLoader from './in_memory/AuthLoader';
 import Vault from './Vault';

 let vault: Vault | null = null;
@@ -60,13 +58,27 @@ function extractParams(
     request: any,
     log: Logger,
     awsService: string,
-    data: { [key: string]: string }
+    data: { [key: string]: string },
+    oTel?: any,
 ) {
+    const {
+        activeSpan,
+        activeTracerContext,
+        tracer,
+    } = oTel;
+    activeSpan?.addEvent('Arsenal:: entered Arsenal.auth.server.extractParams');
+    return tracer.startActiveSpan('Check validity of request parameters to authenticate using Arsenal', undefined, activeTracerContext, extractParamsSpan => {
+        extractParamsSpan.setAttributes({
+            'code.lineno': 75,
+            'code.filename': 'lib/auth/auth.ts',
+            'code.function': 'extractParams',
+            'code.url': 'https://github.com/scality/arsenal/blob/892dee6c1333fcc25c88333ee991f02830cb3c51/lib/auth/auth.ts',
+        });
     log.trace('entered', { method: 'Arsenal.auth.server.extractParams' });
     const authHeader = request.headers.authorization;
     let version: 'v2' |'v4' | null = null;
     let method: 'query' | 'headers' | null = null;
+    activeSpan?.addEvent('Arsenal:: Identifying auth version from authentication header');
     // Identify auth version and method to dispatch to the right check function
     if (authHeader) {
         method = 'headers';
@@ -79,6 +91,7 @@ function extractParams(
         } else {
             log.trace('invalid authorization security header',
                 { header: authHeader });
+            extractParamsSpan.end();
             return { err: errors.AccessDenied };
         }
     } else if (data.Signature) {
@@ -88,21 +101,28 @@ function extractParams(
         method = 'query';
         version = 'v4';
     }
+    activeSpan?.addEvent(`Arsenal::Auth versions identified: ${version}`);
     // Here, either both values are set, or none is set
     if (version !== null && method !== null) {
         if (!checkFunctions[version] || !checkFunctions[version][method]) {
+            activeSpan?.recordException(errors.NotImplemented);
             log.trace('invalid auth version or method',
                 { version, authMethod: method });
+            extractParamsSpan.end();
             return { err: errors.NotImplemented };
         }
+        activeSpan?.addEvent(`Arsenal:: Identified auth method version: ${version} and method: ${method}`);
+        activeSpan?.addEvent('Arsenal:: Checking if valid request headers and query are used to make request to vault');
         log.trace('identified auth method', { version, authMethod: method });
-        return checkFunctions[version][method](request, log, data, awsService);
+        return checkFunctions[version][method](request, log, data, awsService, { activeSpan, extractParamsSpan, activeTracerContext, tracer });
     }

     // no auth info identified
     log.debug('assuming public user');
+    extractParamsSpan.end();
+    activeSpan?.addEvent(`Arsenal:: Identified as public user`);
     return { err: null, params: publicUserInfo };
+    });
 }

 /**
@@ -121,15 +141,30 @@ function doAuth(
     log: Logger,
     cb: (err: Error | null, data?: any) => void,
     awsService: string,
-    requestContexts: any[] | null
+    requestContexts: any[] | null,
+    oTel?: any,
 ) {
-    const res = extractParams(request, log, awsService, request.query);
+    const {
+        activeSpan,
+        activeTracerContext,
+        tracer,
+    } = oTel;
+    activeSpan?.addEvent('Arsenal:: Routing request using doAuth() in arsenal');
+    activeSpan?.addEvent('Arsenal:: Extracting auth parameters and check validity of request parameters to authenticate');
+    const start = process.hrtime.bigint();
+    const res = extractParams(request, log, awsService, request.query, oTel);
+    const end = process.hrtime.bigint();
+    const duration = Number(end - start) / 1e6;
+    activeSpan?.addEvent(`Arsenal:: It took ${duration.toFixed(3)} ms to extract auth parameters and to check validity of request parameters to authenticate`);
     if (res.err) {
+        activeSpan?.recordException(res.err);
         return cb(res.err);
     } else if (res.params instanceof AuthInfo) {
+        activeSpan?.addEvent('Arsenal:: Auth info already in the params, do not need to make a request to cloudserver');
         return cb(null, res.params);
     }
     if (requestContexts) {
+        activeSpan?.addEvent('Arsenal:: Setting auth info in requestContexts');
         requestContexts.forEach((requestContext) => {
             const { params } = res
             if ('data' in params) {
@@ -142,6 +177,7 @@ function doAuth(
             }
         });
+        activeSpan?.addEvent('Arsenal:: Auth info set in requestContexts');
     }

     // Corner cases managed, we're left with normal auth
@@ -149,10 +185,12 @@ function doAuth(
     // @ts-ignore
     res.params.log = log;
     if (res.params.version === 2) {
+        activeSpan?.addEvent('Arsenal:: Sending AuthV2 call to vault');
         // @ts-ignore
         return vault!.authenticateV2Request(res.params, requestContexts, cb);
     }
     if (res.params.version === 4) {
+        activeSpan?.addEvent('Arsenal:: Sending AuthV4 call to vault');
         // @ts-ignore
         return vault!.authenticateV4Request(res.params, requestContexts, cb);
     }
@@ -160,6 +198,7 @@ function doAuth(
     log.error('authentication method not found', {
         method: 'Arsenal.auth.doAuth',
     });
+    activeSpan?.recordException(errors.InternalError);
     return cb(errors.InternalError);
 }
@@ -235,7 +274,7 @@ function generateV4Headers(
         headerName.startsWith('x-amz-')
         || headerName.startsWith('x-scal-')
        || headerName === 'content-md5'
-        || headerName === 'host',
+        || headerName === 'host'
     ).sort().join(';');
     const params = { request, signedHeaders, payloadChecksum,
         credentialScope, timestamp, query: data,
@@ -256,8 +295,7 @@ function generateV4Headers(
 export const server = { extractParams, doAuth }
 export const client = { generateV4Headers, constructStringToSignV2 }
-export const inMemory = { backend: inMemoryBackend, validateAuthConfig, AuthLoader }
-export const backends = { baseBackend, chainBackend }
+export const inMemory = { backend, validateAuthConfig, AuthLoader }
 export {
     setAuthHandler as setHandler,
     AuthInfo,
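The pattern added throughout this branch is consistent: callers hand down an `oTel` bag of `{ activeSpan, activeTracerContext, tracer }`, events are recorded with optional chaining so untraced calls still work, and child spans are ended on every exit path. A self-contained sketch against `@opentelemetry/api`, with illustrative span and event names:

```typescript
// Sketch of the tracing pattern used in extractParams above.
import { context, Context, Span, Tracer } from '@opentelemetry/api';

type OTelBag = {
    activeSpan?: Span;
    activeTracerContext?: Context;
    tracer: Tracer;
};

function extractParamsLike(request: { headers: Record<string, string> }, oTel: OTelBag) {
    const { activeSpan, activeTracerContext, tracer } = oTel;
    activeSpan?.addEvent('entered extractParams');
    // startActiveSpan(name, options, context, fn) runs fn with a new child span.
    return tracer.startActiveSpan(
        'check request parameters',
        undefined,
        activeTracerContext ?? context.active(),
        span => {
            if (!request.headers.authorization) {
                span.end(); // end the span on every exit path
                return { err: new Error('AccessDenied') };
            }
            span.end();
            return { err: null };
        },
    );
}
```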

View File

@@ -1,233 +0,0 @@
import assert from 'assert';
import async from 'async';
import errors from '../../errors';
import BaseBackend from './base';
/**
* Class that provides an authentication backend that will verify signatures
* and retrieve emails and canonical ids associated with an account using a
* given list of authentication backends and vault clients.
*
* @class ChainBackend
*/
export default class ChainBackend extends BaseBackend {
_clients: any[];
/**
* @constructor
* @param {string} service - service id
* @param {object[]} clients - list of authentication backends or vault clients
*/
constructor(service: string, clients: any[]) {
super(service);
assert(Array.isArray(clients) && clients.length > 0, 'invalid client list');
assert(clients.every(client =>
typeof client.verifySignatureV4 === 'function' &&
typeof client.verifySignatureV2 === 'function' &&
typeof client.getCanonicalIds === 'function' &&
typeof client.getEmailAddresses === 'function' &&
typeof client.checkPolicies === 'function' &&
typeof client.healthcheck === 'function',
), 'invalid client: missing required auth backend methods');
this._clients = clients;
}
/*
* try task against each client for one to be successful
*/
_tryEachClient(task: any, cb: any) {
// @ts-ignore
async.tryEach(this._clients.map(client => done => task(client, done)), cb);
}
/*
* apply task to all clients
*/
_forEachClient(task: any, cb: any) {
async.map(this._clients, task, cb);
}
verifySignatureV2(
stringToSign: string,
signatureFromRequest: string,
accessKey: string,
options: any,
callback: any,
) {
this._tryEachClient((client, done) => client.verifySignatureV2(
stringToSign,
signatureFromRequest,
accessKey,
options,
done,
), callback);
}
verifySignatureV4(
stringToSign: string,
signatureFromRequest: string,
accessKey: string,
region: string,
scopeDate: string,
options: any,
callback: any,
) {
this._tryEachClient((client, done) => client.verifySignatureV4(
stringToSign,
signatureFromRequest,
accessKey,
region,
scopeDate,
options,
done,
), callback);
}
static _mergeObjects(objectResponses: any) {
return objectResponses.reduce(
(retObj, resObj) => Object.assign(retObj, resObj.message.body),
{});
}
getCanonicalIds(emailAddresses: string[], options: any, callback: any) {
this._forEachClient(
(client, done) => client.getCanonicalIds(emailAddresses, options, done),
(err, res) => {
if (err) {
return callback(err);
}
// TODO: atm naive merge, better handling of conflicting email results
return callback(null, {
message: {
body: ChainBackend._mergeObjects(res),
},
});
});
}
getEmailAddresses(canonicalIDs: string[], options: any, callback: any) {
this._forEachClient(
(client, done) => client.getEmailAddresses(canonicalIDs, options, done),
(err, res) => {
if (err) {
return callback(err);
}
return callback(null, {
message: {
body: ChainBackend._mergeObjects(res),
},
});
});
}
/*
* merge policy responses into a single message
*/
static _mergePolicies(policyResponses: any) {
const policyMap: any = {};
policyResponses.forEach(resp => {
if (!resp.message || !Array.isArray(resp.message.body)) {
return;
}
const check = (policy) => {
const key = (policy.arn || '') + (policy.versionId || '') + (policy.action || '');
if (!policyMap[key] || !policyMap[key].isAllowed) {
policyMap[key] = policy;
}
// else is duplicate policy
};
resp.message.body.forEach(policy => {
if (Array.isArray(policy)) {
policy.forEach(authResult => check(authResult));
} else {
check(policy);
}
});
});
return Object.keys(policyMap).map(key => {
const policyRes: any = { isAllowed: policyMap[key].isAllowed };
if (policyMap[key].arn !== '') {
policyRes.arn = policyMap[key].arn;
}
if (policyMap[key].versionId) {
policyRes.versionId = policyMap[key].versionId;
}
if (policyMap[key].isImplicit !== undefined) {
policyRes.isImplicit = policyMap[key].isImplicit;
}
if (policyMap[key].action) {
policyRes.action = policyMap[key].action;
}
return policyRes;
});
}
/*
response format:
{ message: {
body: [{}],
code: number,
message: string,
} }
*/
checkPolicies(requestContextParams: any, userArn: string, options: any, callback: any) {
this._forEachClient((client, done) => client.checkPolicies(
requestContextParams,
userArn,
options,
done,
), (err, res) => {
if (err) {
return callback(err);
}
return callback(null, {
message: {
body: ChainBackend._mergePolicies(res),
},
});
});
}
healthcheck(reqUid: string, callback: any) {
this._forEachClient((client, done) =>
client.healthcheck(reqUid, (err, res) => done(null, {
error: !!err ? err : null,
status: res,
}),
), (err, res) => {
if (err) {
return callback(err);
}
const isError = res.some(results => !!results.error);
if (isError) {
return callback(errors.InternalError, res);
}
return callback(null, res);
});
}
report(reqUid: string, callback: any) {
this._forEachClient((client, done) =>
client.report(reqUid, done),
(err, res) => {
if (err) {
return callback(err);
}
const mergedRes = res.reduce((acc, val) => {
Object.keys(val).forEach(k => {
acc[k] = val[k];
});
return acc;
}, {});
return callback(null, mergedRes);
});
}
}
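ChainBackend, absent from the head branch, has two fan-out modes: signature checks fail over client by client, while id/email lookups query every client and merge the results. The failover primitive is `async.tryEach`, shown here in isolation with stub clients:

```typescript
// Sketch: async.tryEach resolves with the first task that succeeds,
// which is how ChainBackend fails over between auth backends.
import async from 'async';

type Done = (err: Error | null, result?: string) => void;

const clients = [
    { name: 'vault-a', verify: (done: Done) => done(new Error('unreachable')) },
    { name: 'vault-b', verify: (done: Done) => done(null, 'ok from vault-b') },
];

async.tryEach(
    clients.map(client => (done: Done) => client.verify(done)),
    (err: Error | null | undefined, result?: string) => {
        // err is only set when every client failed.
        console.log(err ?? result); // 'ok from vault-b'
    },
);
```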

View File

@@ -1,96 +0,0 @@
import errors from '../../errors';
/**
* Base backend class
*
* @class BaseBackend
*/
export default class BaseBackend {
service: string;
/**
* @constructor
* @param {string} service - service identifer for construction arn
*/
constructor(service: string) {
this.service = service;
}
/** verifySignatureV2
* @param stringToSign - string to sign built per AWS rules
* @param signatureFromRequest - signature sent with request
* @param accessKey - account accessKey
* @param options - contains algorithm (SHA1 or SHA256)
* @param callback - callback with either error or user info
* @return calls callback
*/
verifySignatureV2(
stringToSign: string,
signatureFromRequest: string,
accessKey: string,
options: any,
callback: any
) {
return callback(errors.AuthMethodNotImplemented);
}
/** verifySignatureV4
* @param stringToSign - string to sign built per AWS rules
* @param signatureFromRequest - signature sent with request
* @param accessKey - account accessKey
* @param region - region specified in request credential
* @param scopeDate - date specified in request credential
* @param options - options to send to Vault
* (just contains reqUid for logging in Vault)
* @param callback - callback with either error or user info
* @return calls callback
*/
verifySignatureV4(
stringToSign: string,
signatureFromRequest: string,
accessKey: string,
region: string,
scopeDate: string,
options: any,
callback: any
) {
return callback(errors.AuthMethodNotImplemented);
}
/**
* Gets canonical ID's for a list of accounts
* based on email associated with account
* @param emails - list of email addresses
* @param options - to send log id to vault
* @param callback - callback to calling function
* @returns callback with either error or
* object with email addresses as keys and canonical IDs
* as values
*/
getCanonicalIds(emails: string[], options: any, callback: any) {
return callback(errors.AuthMethodNotImplemented);
}
/**
* Gets email addresses (referred to as diplay names for getACL's)
* for a list of accounts based on canonical IDs associated with account
* @param canonicalIDs - list of canonicalIDs
* @param options - to send log id to vault
* @param callback - callback to calling function
* @returns callback with either error or
* an object from Vault containing account canonicalID
* as each object key and an email address as the value (or "NotFound")
*/
getEmailAddresses(canonicalIDs: string[], options: any, callback: any) {
return callback(errors.AuthMethodNotImplemented);
}
checkPolicies(requestContextParams: any, userArn: string, options: any, callback: any) {
return callback(null, { message: { body: [] } });
}
healthcheck(reqUid: string, callback: any) {
return callback(null, { code: 200, message: 'OK' });
}
}
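BaseBackend is abstract by convention: every auth method answers `AuthMethodNotImplemented` until a subclass overrides it, while `checkPolicies` and `healthcheck` ship permissive defaults. A minimal subclass sketch, with error values simplified to plain `Error`s:

```typescript
// Sketch: a concrete backend overrides only what it supports and
// inherits the "not implemented" defaults described above.
class SketchBaseBackend {
    constructor(public service: string) {}

    verifySignatureV2(_sts: string, _sig: string, _key: string, _opts: any, cb: any) {
        return cb(new Error('AuthMethodNotImplemented'));
    }

    healthcheck(_reqUid: string, cb: any) {
        return cb(null, { code: 200, message: 'OK' });
    }
}

class StaticKeyBackend extends SketchBaseBackend {
    verifySignatureV2(_sts: string, sig: string, key: string, _opts: any, cb: any) {
        // Toy rule for the sketch: the signature must equal the access key.
        return sig === key ? cb(null, { accessKey: key }) : cb(new Error('Forbidden'));
    }
}

new StaticKeyBackend('s3').verifySignatureV2('sts', 'k', 'k', {}, console.log);
```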

View File

@@ -4,7 +4,7 @@ import joi from 'joi';
 import werelogs from 'werelogs';

 import * as types from './types';
 import { Account, Accounts } from './types';
-import ARN from '../../../models/ARN';
+import ARN from '../../models/ARN';

 /** Load authentication information from files or pre-loaded account objects */
 export default class AuthLoader {

View File

@@ -1,9 +1,7 @@
-import crypto from 'crypto';
-import { Logger } from 'werelogs';
-import errors from '../../../errors';
+import * as crypto from 'crypto';
+import errors from '../../errors';
 import { calculateSigningKey, hashSignature } from './vaultUtilities';
 import Indexer from './Indexer';
-import BaseBackend from '../base';
 import { Accounts } from './types';

 function _formatResponse(userInfoToSend: any) {
@@ -17,32 +15,26 @@ function _formatResponse(userInfoToSend: any) {
 /**
  * Class that provides a memory backend for verifying signatures and getting
  * emails and canonical ids associated with an account.
- *
- * @class InMemoryBackend
  */
-class InMemoryBackend extends BaseBackend {
+class Backend {
     indexer: Indexer;
-    formatResponse: any;
+    service: string;

-    /**
-     * @constructor
-     * @param service - service identifer for construction arn
-     * @param indexer - indexer instance for retrieving account info
-     * @param formatter - function which accepts user info to send
-     * back and returns it in an object
-     */
-    constructor(service: string, indexer: Indexer, formatter: typeof _formatResponse) {
-        super(service);
+    constructor(service: string, indexer: Indexer) {
+        this.service = service;
         this.indexer = indexer;
-        this.formatResponse = formatter;
     }

+    // CODEQUALITY-TODO-SYNC Should be synchronous
     verifySignatureV2(
         stringToSign: string,
         signatureFromRequest: string,
         accessKey: string,
-        options: any,
-        callback: any,
+        options: { algo: 'SHA256' | 'SHA1' },
+        callback: (
+            error: Error | null,
+            data?: ReturnType<typeof _formatResponse>
+        ) => void
     ) {
         const entity = this.indexer.getEntityByKey(accessKey);
         if (!entity) {
@@ -58,21 +50,27 @@
             accountDisplayName: this.indexer.getAcctDisplayName(entity),
             canonicalID: entity.canonicalID,
             arn: entity.arn,
+            // TODO Why?
             // @ts-ignore
             IAMdisplayName: entity.IAMdisplayName,
         };
-        const vaultReturnObject = this.formatResponse(userInfoToSend);
+        const vaultReturnObject = _formatResponse(userInfoToSend);
         return callback(null, vaultReturnObject);
     }

+    // TODO Options not used. Why ?
+    // CODEQUALITY-TODO-SYNC Should be synchronous
     verifySignatureV4(
         stringToSign: string,
         signatureFromRequest: string,
         accessKey: string,
         region: string,
         scopeDate: string,
-        options: any,
-        callback: any,
+        _options: { algo: 'SHA256' | 'SHA1' },
+        callback: (
+            err: Error | null,
+            data?: ReturnType<typeof _formatResponse>
+        ) => void
     ) {
         const entity = this.indexer.getEntityByKey(accessKey);
         if (!entity) {
@@ -89,14 +87,21 @@
             accountDisplayName: this.indexer.getAcctDisplayName(entity),
             canonicalID: entity.canonicalID,
             arn: entity.arn,
+            // TODO Why?
             // @ts-ignore
             IAMdisplayName: entity.IAMdisplayName,
         };
-        const vaultReturnObject = this.formatResponse(userInfoToSend);
+        const vaultReturnObject = _formatResponse(userInfoToSend);
         return callback(null, vaultReturnObject);
     }

-    getCanonicalIds(emails: string[], log: Logger, cb: any) {
+    // TODO log not used. Why ?
+    // CODEQUALITY-TODO-SYNC Should be synchronous
+    getCanonicalIds(
+        emails: string[],
+        _log: any,
+        cb: (err: null, data: { message: { body: any } }) => void
+    ) {
         const results = {};
         emails.forEach(email => {
             const lowercasedEmail = email.toLowerCase();
@@ -116,7 +121,13 @@
         return cb(null, vaultReturnObject);
     }

-    getEmailAddresses(canonicalIDs: string[], options: any, cb: any) {
+    // TODO options not used. Why ?
+    // CODEQUALITY-TODO-SYNC Should be synchronous
+    getEmailAddresses(
+        canonicalIDs: string[],
+        _options: any,
+        cb: (err: null, data: { message: { body: any } }) => void
+    ) {
         const results = {};
         canonicalIDs.forEach(canonicalId => {
             const foundEntity = this.indexer.getEntityByCanId(canonicalId);
@@ -134,17 +145,24 @@
         return cb(null, vaultReturnObject);
     }

+    // TODO options not used. Why ?
+    // CODEQUALITY-TODO-SYNC Should be synchronous
     /**
      * Gets accountIds for a list of accounts based on
      * the canonical IDs associated with the account
      * @param canonicalIDs - list of canonicalIDs
-     * @param options - to send log id to vault
+     * @param _options - to send log id to vault
      * @param cb - callback to calling function
-     * @returns callback with either error or
+     * @returns The next is wrong. Here to keep archives.
+     * callback with either error or
      * an object from Vault containing account canonicalID
      * as each object key and an accountId as the value (or "NotFound")
      */
-    getAccountIds(canonicalIDs: string[], options: any, cb: any) {
+    getAccountIds(
+        canonicalIDs: string[],
+        _options: any,
+        cb: (err: null, data: { message: { body: any } }) => void
+    ) {
         const results = {};
         canonicalIDs.forEach(canonicalID => {
             const foundEntity = this.indexer.getEntityByCanId(canonicalID);
@@ -161,34 +179,16 @@
         };
         return cb(null, vaultReturnObject);
     }
-
-    report(log: Logger, callback: any) {
-        return callback(null, {});
-    }
 }

-class S3AuthBackend extends InMemoryBackend {
-    /**
-     * @constructor
-     * @param authdata - the authentication config file's data
-     * @param authdata.accounts - array of account objects
-     * @param authdata.accounts[].name - account name
-     * @param authdata.accounts[].email - account email
-     * @param authdata.accounts[].arn - IAM resource name
-     * @param authdata.accounts[].canonicalID - account canonical ID
-     * @param authdata.accounts[].shortid - short account ID
-     * @param authdata.accounts[].keys - array of key objects
-     * @param authdata.accounts[].keys[].access - access key
-     * @param authdata.accounts[].keys[].secret - secret key
-     */
-    constructor(authdata?: Accounts) {
-        super('s3', new Indexer(authdata), _formatResponse);
+class S3AuthBackend extends Backend {
+    constructor(authdata: Accounts) {
+        super('s3', new Indexer(authdata));
     }

-    refreshAuthData(authData?: Accounts) {
+    refreshAuthData(authData: Accounts) {
         this.indexer = new Indexer(authData);
     }
 }

-export { S3AuthBackend as s3 }
+export { S3AuthBackend as s3 };
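The in-memory backend is mainly a test fixture. A usage sketch follows, assuming the package's top-level `auth` export wires up `inMemory.backend` as in the auth.ts diff above; the account fields follow the constructor doc on the deleted side:

```typescript
// Sketch: exercising the in-memory S3 auth backend with one account.
import { auth } from 'arsenal';

const authdata = {
    accounts: [{
        name: 'test-account',
        email: 'test@example.com',
        arn: 'arn:aws:iam::123456789012:root',
        canonicalID: 'abcdef0123456789',
        shortid: '123456789012',
        keys: [{ access: 'AKIAIOSFODNN7EXAMPLE', secret: 'not-a-real-secret' }],
    }],
};

const backend = new auth.inMemory.backend.s3(authdata as any);
// refreshAuthData re-indexes a new account set at runtime.
backend.refreshAuthData(authdata as any);
```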

View File

@@ -5,64 +5,83 @@ import constructStringToSign from './constructStringToSign';
 import checkRequestExpiry from './checkRequestExpiry';
 import algoCheck from './algoCheck';

-export function check(request: any, log: Logger, data: { [key: string]: string }) {
+export function check(request: any, log: Logger, data: { [key: string]: string }, oTel: any) {
+    const { activeSpan, extractParamsSpan, activeTracerContext, tracer } = oTel;
+    activeSpan?.addEvent('Entered V2 header auth check');
     log.trace('running header auth check');
+    activeSpan?.addEvent('Running header auth check');
     const headers = request.headers;
+    activeSpan?.addEvent('Extracting security token');
     const token = headers['x-amz-security-token'];
     if (token && !constants.iamSecurityToken.pattern.test(token)) {
         log.debug('invalid security token', { token });
+        activeSpan.recordException(errors.InvalidToken);
+        extractParamsSpan.end();
         return { err: errors.InvalidToken };
     }
+    activeSpan?.addEvent('Extracted security token');
+    activeSpan?.addEvent('Checking timestamp');
     // Check to make sure timestamp is within 15 minutes of current time
-    let timestamp = headers['x-amz-date'] ?
-        headers['x-amz-date'] : headers.date;
+    let timestamp = headers['x-amz-date'] ? headers['x-amz-date'] : headers.date;
     timestamp = Date.parse(timestamp);
     if (!timestamp) {
-        log.debug('missing or invalid date header',
-            { method: 'auth/v2/headerAuthCheck.check' });
-        return { err: errors.AccessDenied.
-            customizeDescription('Authentication requires a valid Date or ' +
-            'x-amz-date header') };
+        log.debug('missing or invalid date header', { method: 'auth/v2/headerAuthCheck.check' });
+        activeSpan.recordException(errors.AccessDenied.customizeDescription('Authentication requires a valid Date or x-amz-date header'));
+        extractParamsSpan.end();
+        return { err: errors.AccessDenied.customizeDescription('Authentication requires a valid Date or x-amz-date header') };
     }
+    activeSpan?.addEvent('Checked timestamp');
+    activeSpan?.addEvent('Checking request expiry');
     const err = checkRequestExpiry(timestamp, log);
     if (err) {
+        activeSpan.recordException(err);
+        extractParamsSpan.end();
         return { err };
     }
+    activeSpan?.addEvent('Checked request expiry');
+    activeSpan?.addEvent('Extracting authorization header');
-    // Authorization Header should be
-    // in the format of 'AWS AccessKey:Signature'
+    // Authorization Header should be in the format of 'AWS AccessKey:Signature'
     const authInfo = headers.authorization;
+    activeSpan?.addEvent('Extracted authorization header');
     if (!authInfo) {
         log.debug('missing authorization security header');
+        activeSpan.recordException(errors.MissingSecurityHeader);
+        extractParamsSpan.end();
         return { err: errors.MissingSecurityHeader };
     }
     const semicolonIndex = authInfo.indexOf(':');
     if (semicolonIndex < 0) {
         log.debug('invalid authorization header', { authInfo });
+        activeSpan.recordException(errors.InvalidArgument);
+        extractParamsSpan.end();
         return { err: errors.InvalidArgument };
     }
-    const accessKey = semicolonIndex > 4 ?
-        authInfo.substring(4, semicolonIndex).trim() : undefined;
+    const accessKey = semicolonIndex > 4 ? authInfo.substring(4, semicolonIndex).trim() : undefined;
     if (typeof accessKey !== 'string' || accessKey.length === 0) {
         log.trace('invalid authorization header', { authInfo });
+        activeSpan.recordException(errors.MissingSecurityHeader);
+        extractParamsSpan.end();
         return { err: errors.MissingSecurityHeader };
     }
     // @ts-ignore
     log.addDefaultFields({ accessKey });
     const signatureFromRequest = authInfo.substring(semicolonIndex + 1).trim();
     log.trace('signature from request', { signatureFromRequest });
+    activeSpan?.addEvent('Extracting signature from request');
+    activeSpan?.addEvent('Constructing string to sign');
     const stringToSign = constructStringToSign(request, data, log);
     log.trace('constructed string to sign', { stringToSign });
+    activeSpan?.addEvent('Constructed string to sign v2 headers');
     const algo = algoCheck(signatureFromRequest.length);
     log.trace('algo for calculating signature', { algo });
+    activeSpan?.addEvent('Checked algorithm for calculating signature');
     if (algo === undefined) {
+        activeSpan.recordException(errors.InvalidArgument);
+        extractParamsSpan.end();
         return { err: errors.InvalidArgument };
     }
+    activeSpan?.addEvent('Exiting V2 header auth check');
+    extractParamsSpan.end();
     return {
         err: null,
         params: {
@@ -80,3 +99,4 @@ export function check(request: any, log: Logger, data: { [key: string]: string }
         },
     };
 }
+
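What the deferred comparison ultimately verifies is a Base64 HMAC over the string-to-sign, and the Base64 length is what lets `algoCheck` infer the algorithm: a 20-byte SHA-1 digest encodes to 28 characters, a 32-byte SHA-256 digest to 44. A sketch, not Arsenal's exact helpers:

```typescript
// Sketch: computing an AWS v2-style signature and the digest lengths
// that algoCheck keys on.
import * as crypto from 'crypto';

function signV2(secretKey: string, stringToSign: string, algo: 'sha1' | 'sha256'): string {
    return crypto.createHmac(algo, secretKey).update(stringToSign, 'utf8').digest('base64');
}

const sig1 = signV2('secret', 'GET\n\n\n\n/bucket', 'sha1');
const sig256 = signV2('secret', 'GET\n\n\n\n/bucket', 'sha256');
console.log(sig1.length, sig256.length); // 28 44
```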

View File

@@ -4,19 +4,26 @@ import * as constants from '../../constants';
 import algoCheck from './algoCheck';
 import constructStringToSign from './constructStringToSign';

-export function check(request: any, log: Logger, data: { [key: string]: string }) {
+export function check(request: any, log: Logger, data: { [key: string]: string }, oTel: any) {
+    const { activeSpan, extractParamsSpan, activeTracerContext, tracer } = oTel;
+    activeSpan?.addEvent('Entered query auth check');
     log.trace('running query auth check');
+    activeSpan?.addEvent('Running query auth check');
     if (request.method === 'POST') {
         log.debug('query string auth not supported for post requests');
+        activeSpan.recordException(errors.NotImplemented);
+        extractParamsSpan.end();
         return { err: errors.NotImplemented };
     }
     const token = data.SecurityToken;
+    activeSpan?.addEvent('Extracting security token');
     if (token && !constants.iamSecurityToken.pattern.test(token)) {
         log.debug('invalid security token', { token });
+        activeSpan.recordException(errors.InvalidToken);
+        extractParamsSpan.end();
         return { err: errors.InvalidToken };
     }
+    activeSpan?.addEvent('Extracted security token');
     /*
     Check whether request has expired or if
     expires parameter is more than 604800000 milliseconds
@@ -25,47 +32,57 @@ export function check(request: any, log: Logger, data: { [key: string]: string }
     multiply by 1000 to obtain
     milliseconds to compare to Date.now()
     */
+    activeSpan?.addEvent('Checking expiration time');
     const expirationTime = parseInt(data.Expires, 10) * 1000;
     if (Number.isNaN(expirationTime)) {
-        log.debug('invalid expires parameter',
-            { expires: data.Expires });
+        log.debug('invalid expires parameter', { expires: data.Expires });
+        activeSpan.recordException(errors.MissingSecurityHeader);
+        extractParamsSpan.end();
         return { err: errors.MissingSecurityHeader };
     }
+    activeSpan?.addEvent('Checked expiration time');
     const currentTime = Date.now();
     const preSignedURLExpiry = process.env.PRE_SIGN_URL_EXPIRY
         && !Number.isNaN(process.env.PRE_SIGN_URL_EXPIRY)
         ? Number.parseInt(process.env.PRE_SIGN_URL_EXPIRY, 10)
         : constants.defaultPreSignedURLExpiry * 1000;
     if (expirationTime > currentTime + preSignedURLExpiry) {
-        log.debug('expires parameter too far in future',
-            { expires: request.query.Expires });
+        log.debug('expires parameter too far in future', { expires: request.query.Expires });
+        activeSpan.recordException(errors.AccessDenied);
+        extractParamsSpan.end();
         return { err: errors.AccessDenied };
     }
     if (currentTime > expirationTime) {
-        log.debug('current time exceeds expires time',
-            { expires: request.query.Expires });
+        log.debug('current time exceeds expires time', { expires: request.query.Expires });
+        activeSpan.recordException(errors.RequestTimeTooSkewed);
+        extractParamsSpan.end();
         return { err: errors.RequestTimeTooSkewed };
     }
     const accessKey = data.AWSAccessKeyId;
     // @ts-ignore
     log.addDefaultFields({ accessKey });
     const signatureFromRequest = decodeURIComponent(data.Signature);
     log.trace('signature from request', { signatureFromRequest });
+    activeSpan?.addEvent('Extracting signature from request');
     if (!accessKey || !signatureFromRequest) {
         log.debug('invalid access key/signature parameters');
+        activeSpan.recordException(errors.MissingSecurityHeader);
+        extractParamsSpan.end();
         return { err: errors.MissingSecurityHeader };
     }
     const stringToSign = constructStringToSign(request, data, log);
     log.trace('constructed string to sign', { stringToSign });
+    activeSpan?.addEvent('Constructed string to sign v2 query');
     const algo = algoCheck(signatureFromRequest.length);
     log.trace('algo for calculating signature', { algo });
+    activeSpan?.addEvent('Checked algorithm for calculating signature');
     if (algo === undefined) {
+        activeSpan.recordException(errors.InvalidArgument);
+        extractParamsSpan.end();
         return { err: errors.InvalidArgument };
     }
+    activeSpan?.addEvent('Exiting query auth check');
+    extractParamsSpan.end();
     return {
         err: null,
         params: {
@@ -82,3 +99,4 @@ export function check(request: any, log: Logger, data: { [key: string]: string }
         },
     };
 }
+
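The expiry logic above is plain epoch arithmetic: `Expires` arrives in seconds, is scaled to milliseconds, and must land between now and now plus the presign window (604800000 ms by default, per the comment in the hunk). The same checks, reduced to a sketch:

```typescript
// Sketch: the three failure modes of the presigned-URL expiry checks.
const defaultPreSignedURLExpiryMs = 604800 * 1000; // 7 days, per the comment above

function checkExpiry(expiresSeconds: string, now: number = Date.now()): string {
    const expirationTime = parseInt(expiresSeconds, 10) * 1000;
    if (Number.isNaN(expirationTime)) return 'MissingSecurityHeader';
    if (expirationTime > now + defaultPreSignedURLExpiryMs) return 'AccessDenied';
    if (now > expirationTime) return 'RequestTimeTooSkewed';
    return 'ok';
}

console.log(checkExpiry(String(Math.floor(Date.now() / 1000) + 60))); // 'ok'
```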

View File

@@ -42,40 +42,37 @@ export default function awsURIencode(
     if (typeof input !== 'string') {
         return '';
     }
-    let encoded = "";
-    // precalc slash and star based on configs
     const slash = encodeSlash === undefined || encodeSlash ? '%2F' : '/';
     const star = noEncodeStar !== undefined && noEncodeStar ? '*' : '%2A';
-    for (let i = 0; i < input.length; i++) {
-        let ch = input.charAt(i);
-        if ((ch >= 'A' && ch <= 'Z') ||
-            (ch >= 'a' && ch <= 'z') ||
-            (ch >= '0' && ch <= '9') ||
-            ch === '_' || ch === '-' ||
-            ch === '~' || ch === '.') {
-            encoded = encoded.concat(ch);
-        } else if (ch === ' ') {
-            encoded = encoded.concat('%20');
-        } else if (ch === '/') {
-            encoded = encoded.concat(slash);
-        } else if (ch === '*') {
-            encoded = encoded.concat(star);
-        } else {
-            if (ch >= '\uD800' && ch <= '\uDBFF') {
-                // If this character is a high surrogate peek the next character
-                // and join it with this one if the next character is a low
-                // surrogate.
-                // Otherwise the encoded URI will contain the two surrogates as
-                // two distinct UTF-8 sequences which is not valid UTF-8.
-                if (i + 1 < input.length) {
-                    const ch2 = input.charAt(i + 1);
-                    if (ch2 >= '\uDC00' && ch2 <= '\uDFFF') {
-                        i++;
-                        ch += ch2;
-                    }
-                }
-            }
-            encoded = encoded.concat(_toHexUTF8(ch));
-        }
-    }
-    return encoded;
+    const encoded: string[] = [];
+    const charArray = Array.from(input);
+    for (const ch of charArray) {
+        switch (true) {
+        case ch >= 'A' && ch <= 'Z':
+        case ch >= 'a' && ch <= 'z':
+        case ch >= '0' && ch <= '9':
+        case ch === '-':
+        case ch === '_':
+        case ch === '~':
+        case ch === '.':
+            encoded.push(ch);
+            break;
+        case ch === '/':
+            encoded.push(slash);
+            break;
+        case ch === '*':
+            encoded.push(star);
+            break;
+        case ch === ' ':
+            encoded.push('%20');
+            break;
+        default:
+            encoded.push(_toHexUTF8(ch));
+            break;
+        }
+    }
+    return encoded.join('');
 }
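One detail worth calling out in the rewrite above: Array.from(input) iterates by Unicode code point, so an astral character arrives in ch as a complete surrogate pair, which is exactly what the deleted charAt/high-surrogate peeking logic achieved by hand. A quick illustration of the standard JavaScript behavior this relies on (not Arsenal code):

    const emoji = '\u{1F600}';                // one code point, two UTF-16 units
    console.log(emoji.length);                // 2
    console.log(Array.from(emoji).length);    // 1: the pair stays together
    // Percent-encoding the whole pair yields valid UTF-8:
    console.log(encodeURIComponent(emoji));   // '%F0%9F%98%80'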

View File

@@ -17,7 +17,7 @@ export default function constructStringToSign(params: {
     log?: Logger;
     proxyPath?: string;
     awsService: string;
-}): string | Error {
+}, oTel?: any,): string | Error {
     const {
         request,
         signedHeaders,
@@ -29,7 +29,12 @@ export default function constructStringToSign(params: {
         proxyPath,
     } = params;
     const path = proxyPath || request.path;
+    const {
+        activeSpan,
+        activeTracerContext,
+        tracer,
+    } = oTel;
+    activeSpan?.addEvent('Constructing canonical request for Authv4');
     const canonicalReqResult = createCanonicalRequest({
         pHttpVerb: request.method,
         pResource: path,
@@ -38,8 +43,7 @@ export default function constructStringToSign(params: {
         pSignedHeaders: signedHeaders,
         payloadChecksum,
         service: params.awsService,
-    });
+    }, oTel);
     // TODO Why that line?
     // @ts-ignore
     if (canonicalReqResult instanceof Error) {
@@ -51,9 +55,13 @@ export default function constructStringToSign(params: {
     if (log) {
         log.debug('constructed canonicalRequest', { canonicalReqResult });
     }
+    const createSignatureSpan = tracer.startSpan('Creating signature hash for AuthV4 using crypto sha256');
+    activeSpan?.addEvent('Creating signature hash for AuthV4 using crypto sha256');
     const sha256 = crypto.createHash('sha256');
     const canonicalHex = sha256.update(canonicalReqResult, 'binary')
         .digest('hex');
+    activeSpan?.addEvent('Created signature hash for AuthV4 using crypto sha256');
+    createSignatureSpan.end();
     const stringToSign = `AWS4-HMAC-SHA256\n${timestamp}\n` +
         `${credentialScope}\n${canonicalHex}`;
     return stringToSign;
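For reference, the value assembled here follows the fixed SigV4 layout: the literal algorithm name, the request timestamp, the credential scope, and the hex SHA-256 of the canonical request, joined by newlines. A self-contained sketch with illustrative inputs (the real timestamp and scope come from the request):

    import * as crypto from 'crypto';

    function stringToSignV4(timestamp: string, credentialScope: string,
        canonicalRequest: string): string {
        const canonicalHex = crypto.createHash('sha256')
            .update(canonicalRequest, 'binary').digest('hex');
        return `AWS4-HMAC-SHA256\n${timestamp}\n${credentialScope}\n${canonicalHex}`;
    }
    // e.g. stringToSignV4('20240701T120000Z', '20240701/us-east-1/s3/aws4_request', ...)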

View File

@@ -19,8 +19,16 @@ export default function createCanonicalRequest(
         pSignedHeaders: any;
         service: string;
         payloadChecksum: string;
-    }
+    },
+    oTel?: any,
 ) {
+    const {
+        activeSpan,
+        activeTracerContext,
+        tracer,
+    } = oTel;
+    activeSpan?.addEvent('Entered createCanonicalRequest');
     const pHttpVerb = params.pHttpVerb;
     const pResource = params.pResource;
     const pQuery = params.pQuery;
@@ -28,35 +36,34 @@ export default function createCanonicalRequest(
     const pSignedHeaders = params.pSignedHeaders;
     const service = params.service;
     let payloadChecksum = params.payloadChecksum;
+    const payloadChecksumSpan = tracer.startSpan('ComputePayloadChecksum');
     if (!payloadChecksum) {
         if (pHttpVerb === 'GET') {
             payloadChecksum = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b' +
                 '934ca495991b7852b855';
         } else if (pHttpVerb === 'POST') {
             let notEncodeStar = false;
-            // The java sdk does not encode the '*' parameter to compute the
-            // signature, if the user-agent is recognized, we need to keep
-            // the plain '*' as well.
             if (/aws-sdk-java\/[0-9.]+/.test(pHeaders['user-agent'])) {
                 notEncodeStar = true;
             }
             let payload = queryString.stringify(pQuery, undefined, undefined, {
-                encodeURIComponent: input => awsURIencode(input, true,
-                    notEncodeStar),
+                encodeURIComponent: input => awsURIencode(input, true, notEncodeStar),
             });
             payload = payload.replace(/%20/g, '+');
             payloadChecksum = crypto.createHash('sha256')
                 .update(payload, 'binary').digest('hex').toLowerCase();
         }
     }
+    payloadChecksumSpan.end();
+    const canonicalURISpan = tracer.startSpan('ComputeCanonicalURI');
     const canonicalURI = !!pResource ? awsURIencode(pResource, false) : '/';
-    // canonical query string
+    canonicalURISpan.end();
+    const canonicalQueryStrSpan = tracer.startSpan('ComputeCanonicalQueryStr');
     let canonicalQueryStr = '';
-    if (pQuery && !((service === 'iam' || service === 'ring' ||
-        service === 'sts') &&
-        pHttpVerb === 'POST')) {
+    if (pQuery && !((service === 'iam' || service === 'ring' || service === 'sts') && pHttpVerb === 'POST')) {
         const sortedQueryParams = Object.keys(pQuery).sort().map(key => {
             const encodedKey = awsURIencode(key);
             const value = pQuery[key] ? awsURIencode(pQuery[key]) : '';
@@ -64,32 +71,54 @@ export default function createCanonicalRequest(
         });
         canonicalQueryStr = sortedQueryParams.join('&');
     }
-    // signed headers
+    canonicalQueryStrSpan.end();
+    const signedHeadersSpan = tracer.startSpan('SortSignedHeadersAlphabetically');
+    activeSpan?.addEvent('Splitting signed headers using deliminator: ;');
     const signedHeadersList = pSignedHeaders.split(';');
+    activeSpan?.addEvent('Split signed headers using ; as deliminator');
+    activeSpan?.addEvent('Sorting signed headers alphabetically');
     signedHeadersList.sort((a: any, b: any) => a.localeCompare(b));
+    activeSpan?.addEvent('Sorted signed headers alphabetically');
+    activeSpan?.addEvent('Joining signed headers using deliminator: ;');
     const signedHeaders = signedHeadersList.join(';');
+    activeSpan?.addEvent('Joined signed headers using ; as deliminator');
+    activeSpan.setAttributes({
+        'signedHeaders.request': pSignedHeaders,
+        'signedHeaders.request.authv4': signedHeaders,
+    });
+    signedHeadersSpan.setAttributes({
+        'signedHeaders.request': pSignedHeaders,
+        'signedHeaders.request.authv4': signedHeaders,
+        'code.url': 'https://github.com/scality/arsenal/blob/c6bb489adeb7419fdbcdf01db2b46a593747530d/lib/auth/v4/createCanonicalRequest.ts#L76',
+        'code.function': 'createCanonicalRequest',
+        'code.lineno': 76,
+        'code.filename': 'lib/auth/v4/createCanonicalRequest.ts',
+    });
+    signedHeadersSpan.end();
-    // canonical headers
+    const canonicalHeadersListSpan = tracer.startSpan('FormatHeadersToMatch CanonicalHeadersList');
     const canonicalHeadersList = signedHeadersList.map((signedHeader: any) => {
         if (pHeaders[signedHeader] !== undefined) {
             const trimmedHeader = pHeaders[signedHeader]
                 .trim().replace(/\s+/g, ' ');
             return `${signedHeader}:${trimmedHeader}\n`;
         }
-        // nginx will strip the actual expect header so add value of
-        // header back here if it was included as a signed header
         if (signedHeader === 'expect') {
             return `${signedHeader}:100-continue\n`;
         }
-        // handle case where signed 'header' is actually query param
         return `${signedHeader}:${pQuery[signedHeader]}\n`;
     });
+    canonicalHeadersListSpan.end();
+    const canonicalHeadersSpan = tracer.startSpan('JoinAllCanonicalHeaders using no deliminator');
     const canonicalHeaders = canonicalHeadersList.join('');
+    canonicalHeadersSpan.end();
+    const canonicalRequestSpan = tracer.startSpan('ConstructCanonicalRequest');
     const canonicalRequest = `${pHttpVerb}\n${canonicalURI}\n` +
         `${canonicalQueryStr}\n${canonicalHeaders}\n` +
         `${signedHeaders}\n${payloadChecksum}`;
+    canonicalRequestSpan.end();
     return canonicalRequest;
 }
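To make the six newline-joined parts above concrete, this is what the function would return for a bare GET of /bucket/key with only host and x-amz-date signed (illustrative values; the final line is the empty-payload SHA-256 hard-coded in the GET branch):

    // GET
    // /bucket/key
    //                                    <- empty canonical query string
    // host:s3.example.com
    // x-amz-date:20240701T120000Z
    //                                    <- blank line ending the canonical headers
    // host;x-amz-date
    // e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855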

View File

@@ -21,70 +21,78 @@ import {
  * POST request
  * @param awsService - Aws service ('iam' or 's3')
  */
-export function check(
-    request: any,
-    log: Logger,
-    data: { [key: string]: string },
-    awsService: string
-) {
+export function check(request: any, log: Logger, data: { [key: string]: string }, awsService: string, oTel: any) {
+    const { activeSpan, extractParamsSpan, activeTracerContext, tracer } = oTel;
+    activeSpan?.addEvent('Entered V4 header auth check');
     log.trace('running header auth check');
+    activeSpan?.addEvent('Extracting security token');
     const token = request.headers['x-amz-security-token'];
     if (token && !constants.iamSecurityToken.pattern.test(token)) {
         log.debug('invalid security token', { token });
+        activeSpan.recordException(errors.InvalidToken);
+        extractParamsSpan.end();
         return { err: errors.InvalidToken };
     }
-    // authorization header
+    activeSpan?.addEvent('Extracted security token');
+    activeSpan?.addEvent('Extracting authorization header');
     const authHeader = request.headers.authorization;
     if (!authHeader) {
         log.debug('missing authorization header');
+        activeSpan.recordException(errors.MissingSecurityHeader);
+        extractParamsSpan.end();
         return { err: errors.MissingSecurityHeader };
     }
+    activeSpan?.addEvent('Extracted authorization header');
+    activeSpan?.addEvent('Extracting auth header items');
     const authHeaderItems = extractAuthItems(authHeader, log);
     if (Object.keys(authHeaderItems).length < 3) {
         log.debug('invalid authorization header', { authHeader });
+        activeSpan.recordException(errors.InvalidArgument);
+        extractParamsSpan.end();
         return { err: errors.InvalidArgument };
     }
+    activeSpan?.addEvent('Extracted auth header items');
     const payloadChecksum = request.headers['x-amz-content-sha256'];
     if (!payloadChecksum && awsService !== 'iam') {
         log.debug('missing payload checksum');
+        activeSpan.recordException(errors.MissingSecurityHeader);
+        extractParamsSpan.end();
         return { err: errors.MissingSecurityHeader };
     }
     if (payloadChecksum === 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD') {
         log.trace('requesting streaming v4 auth');
         if (request.method !== 'PUT') {
-            log.debug('streaming v4 auth for put only',
-                { method: 'auth/v4/headerAuthCheck.check' });
+            log.debug('streaming v4 auth for put only', { method: 'auth/v4/headerAuthCheck.check' });
+            activeSpan.recordException(errors.InvalidArgument);
+            extractParamsSpan.end();
             return { err: errors.InvalidArgument };
         }
         if (!request.headers['x-amz-decoded-content-length']) {
+            activeSpan.recordException(errors.MissingSecurityHeader);
+            extractParamsSpan.end();
             return { err: errors.MissingSecurityHeader };
         }
     }
     log.trace('authorization header from request', { authHeader });
     const signatureFromRequest = authHeaderItems.signatureFromRequest!;
     const credentialsArr = authHeaderItems.credentialsArr!;
     const signedHeaders = authHeaderItems.signedHeaders!;
+    activeSpan.addEvent('Checking if signed headers are complete');
     if (!areSignedHeadersComplete(signedHeaders, request.headers)) {
         log.debug('signedHeaders are incomplete', { signedHeaders });
+        activeSpan.recordException(errors.AccessDenied);
+        extractParamsSpan.end();
         return { err: errors.AccessDenied };
     }
+    activeSpan.addEvent('Signed headers are complete');
     let timestamp: string | undefined;
     // check request timestamp
+    activeSpan.addEvent('Checking request timestamp');
     const xAmzDate = request.headers['x-amz-date'];
     if (xAmzDate) {
         const xAmzDateArr = xAmzDate.split('T');
         // check that x-amz- date has the correct format and after epochTime
-        if (xAmzDateArr.length === 2 && xAmzDateArr[0].length === 8
-            && xAmzDateArr[1].length === 7
-            && Number.parseInt(xAmzDateArr[0], 10) > 19700101) {
+        if (xAmzDateArr.length === 2 && xAmzDateArr[0].length === 8 && xAmzDateArr[1].length === 7 && Number.parseInt(xAmzDateArr[0], 10) > 19700101) {
             // format of x-amz- date is ISO 8601: YYYYMMDDTHHMMSSZ
             timestamp = request.headers['x-amz-date'];
         }
@@ -92,27 +100,27 @@ export function check(
         timestamp = convertUTCtoISO8601(request.headers.date);
     }
     if (!timestamp) {
-        log.debug('missing or invalid date header',
-            { method: 'auth/v4/headerAuthCheck.check' });
-        return { err: errors.AccessDenied.
-            customizeDescription('Authentication requires a valid Date or ' +
-            'x-amz-date header') };
+        log.debug('missing or invalid date header', { method: 'auth/v4/headerAuthCheck.check' });
+        activeSpan.recordException(errors.AccessDenied.customizeDescription('Authentication requires a valid Date or x-amz-date header'));
+        extractParamsSpan.end();
+        return { err: errors.AccessDenied.customizeDescription('Authentication requires a valid Date or x-amz-date header') };
     }
-    const validationResult = validateCredentials(credentialsArr, timestamp,
-        log);
+    activeSpan.addEvent('Request timestamp is valid');
+    activeSpan.addEvent('Validating credentials');
+    const validationResult = validateCredentials(credentialsArr, timestamp, log);
     if (validationResult instanceof Error) {
-        log.debug('credentials in improper format', { credentialsArr,
-            timestamp, validationResult });
+        log.debug('credentials in improper format', { credentialsArr, timestamp, validationResult });
+        activeSpan.recordException(validationResult);
+        extractParamsSpan.end();
         return { err: validationResult };
     }
+    activeSpan.addEvent('Credentials are valid');
     // credentialsArr is [accessKey, date, region, aws-service, aws4_request]
     const scopeDate = credentialsArr[1];
     const region = credentialsArr[2];
     const service = credentialsArr[3];
     const accessKey = credentialsArr.shift();
     const credentialScope = credentialsArr.join('/');
     // In AWS Signature Version 4, the signing key is valid for up to seven days
     // (see Introduction to Signing Requests.
     // Therefore, a signature is also valid for up to seven days or
@@ -124,25 +132,17 @@ export function check(
     // TODO: When implementing bucket policies,
     // note that expiration can be shortened so
     // expiry is as set out in the policy.
     // 15 minutes in seconds
+    activeSpan.addEvent('checking if signature is expired')
     const expiry = (15 * 60);
     const isTimeSkewed = checkTimeSkew(timestamp, expiry, log);
     if (isTimeSkewed) {
+        activeSpan.recordException(errors.RequestTimeTooSkewed);
+        extractParamsSpan.end();
         return { err: errors.RequestTimeTooSkewed };
     }
-    let proxyPath: string | undefined;
-    if (request.headers.proxy_path) {
-        try {
-            proxyPath = decodeURIComponent(request.headers.proxy_path);
-        } catch (err) {
-            log.debug('invalid proxy_path header', { proxyPath, err });
-            return { err: errors.InvalidArgument.customizeDescription(
-                'invalid proxy_path header') };
-        }
-    }
+    activeSpan.addEvent('signature is not expired');
+    activeSpan.addEvent('Constructing string to sign');
     const stringToSign = constructStringToSign({
         log,
         request,
@@ -152,14 +152,16 @@ export function check(
         timestamp,
         payloadChecksum,
         awsService: service,
-        proxyPath,
-    });
+    }, oTel);
     log.trace('constructed stringToSign', { stringToSign });
     if (stringToSign instanceof Error) {
+        activeSpan.recordException(stringToSign);
+        extractParamsSpan.end();
         return { err: stringToSign };
     }
+    activeSpan.addEvent('Constructed string to sign v4 headers');
+    activeSpan.addEvent('Exiting V4 header auth check');
+    extractParamsSpan.end();
     return {
         err: null,
         params: {
@@ -174,8 +176,6 @@ export function check(
             authType: 'REST-HEADER',
             signatureVersion: 'AWS4-HMAC-SHA256',
             signatureAge: Date.now() - convertAmzTimeToMs(timestamp),
-            // credentialScope and timestamp needed for streaming V4
-            // chunk evaluation
             credentialScope,
             timestamp,
             securityToken: token,
@@ -183,3 +183,4 @@ export function check(
         },
     };
 }
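Downstream, Vault recomputes the signature from these params with the standard SigV4 key-derivation chain. A minimal self-contained sketch of that chain (the AWS-documented algorithm, not Vault's code):

    import * as crypto from 'crypto';

    const hmac = (key: crypto.BinaryLike, data: string) =>
        crypto.createHmac('sha256', key).update(data, 'utf8').digest();

    function signV4(secretKey: string, scopeDate: string, region: string,
        service: string, stringToSign: string): string {
        const kDate = hmac(`AWS4${secretKey}`, scopeDate);   // e.g. '20240701'
        const kRegion = hmac(kDate, region);                 // e.g. 'us-east-1'
        const kService = hmac(kRegion, service);             // e.g. 's3'
        const kSigning = hmac(kService, 'aws4_request');
        return hmac(kSigning, stringToSign).toString('hex');
    }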

View File

@@ -12,10 +12,20 @@ import { areSignedHeadersComplete } from './validateInputs';
  * @param log - logging object
  * @param data - Contain authentification params (GET or POST data)
  */
-export function check(request: any, log: Logger, data: { [key: string]: string }) {
+export function check(request: any, log: Logger, data: { [key: string]: string }, oTel: any) {
+    const {
+        activeSpan,
+        extractParamsSpan,
+        activeTracerContext,
+        tracer,
+    } = oTel;
+    activeSpan?.addEvent('Arsenal:: entered Arsenal.auth.v4.queryAuthCheck');
+    activeSpan?.addEvent('Arsenal:: extracting query parameters')
     const authParams = extractQueryParams(data, log);
+    activeSpan?.addEvent('Arsenal:: extracting query params');
     if (Object.keys(authParams).length !== 5) {
+        activeSpan.recordException(errors.InvalidArgument);
+        extractParamsSpan.end();
         return { err: errors.InvalidArgument };
     }
@@ -24,6 +34,8 @@ export function check(request: any, log: Logger, data: { [key: string]: string }
     const token = data['X-Amz-Security-Token'];
     if (token && !constants.iamSecurityToken.pattern.test(token)) {
         log.debug('invalid security token', { token });
+        activeSpan.recordException(errors.InvalidToken);
+        extractParamsSpan.end();
         return { err: errors.InvalidToken };
     }
@@ -35,6 +47,8 @@ export function check(request: any, log: Logger, data: { [key: string]: string }
     if (!areSignedHeadersComplete(signedHeaders, request.headers)) {
         log.debug('signedHeaders are incomplete', { signedHeaders });
+        activeSpan.recordException(errors.AccessDenied);
+        extractParamsSpan.end();
         return { err: errors.AccessDenied };
     }
@@ -43,6 +57,8 @@ export function check(request: any, log: Logger, data: { [key: string]: string }
     if (validationResult instanceof Error) {
         log.debug('credentials in improper format', { credential,
             timestamp, validationResult });
+        activeSpan.recordException(validationResult);
+        extractParamsSpan.end();
         return { err: validationResult };
     }
     const accessKey = credential[0];
@@ -53,20 +69,11 @@ export function check(request: any, log: Logger, data: { [key: string]: string }
     const isTimeSkewed = checkTimeSkew(timestamp, expiry, log);
     if (isTimeSkewed) {
+        activeSpan.recordException(errors.RequestTimeTooSkewed);
+        extractParamsSpan.end();
         return { err: errors.RequestTimeTooSkewed };
     }
-    let proxyPath: string | undefined;
-    if (request.headers.proxy_path) {
-        try {
-            proxyPath = decodeURIComponent(request.headers.proxy_path);
-        } catch (err) {
-            log.debug('invalid proxy_path header', { proxyPath });
-            return { err: errors.InvalidArgument.customizeDescription(
-                'invalid proxy_path header') };
-        }
-    }
     // In query v4 auth, the canonical request needs
     // to include the query params OTHER THAN
     // the signature so create a
@@ -82,6 +89,7 @@ export function check(request: any, log: Logger, data: { [key: string]: string }
     // building string to sign
     const payloadChecksum = 'UNSIGNED-PAYLOAD';
+    activeSpan?.addEvent('Constructing string to sign');
     const stringToSign = constructStringToSign({
         log,
         request,
@@ -92,12 +100,16 @@ export function check(request: any, log: Logger, data: { [key: string]: string }
         credentialScope:
             `${scopeDate}/${region}/${service}/${requestType}`,
         awsService: service,
-        proxyPath,
-    });
+    }, oTel);
+    activeSpan?.addEvent('Constructed string to sign v4 query');
     if (stringToSign instanceof Error) {
+        activeSpan.recordException(stringToSign);
+        extractParamsSpan.end();
         return { err: stringToSign };
     }
     log.trace('constructed stringToSign', { stringToSign });
+    activeSpan.addEvent('Arsenal:: exiting Arsenal.auth.v4.queryAuthCheck');
+    extractParamsSpan.end();
     return {
         err: null,
         params: {
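For reference, the five auth parameters extracted at the top of this function presumably map onto the standard AWS presigned-query fields; a typical v4 presigned GET carries them in the URL like this (illustrative values):

    // ?X-Amz-Algorithm=AWS4-HMAC-SHA256
    // &X-Amz-Credential=AKIDEXAMPLE%2F20240701%2Fus-east-1%2Fs3%2Faws4_request
    // &X-Amz-Date=20240701T120000Z
    // &X-Amz-Expires=86400
    // &X-Amz-SignedHeaders=host
    // &X-Amz-Signature=...   (excluded from the canonical query string, as the
    //                         comment above notes, then compared to the recomputed value)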

View File

@@ -3,7 +3,7 @@ import async from 'async';
 import errors from '../../../errors';
 import { Logger } from 'werelogs';
 import Vault, { AuthV4RequestParams } from '../../Vault';
-import { Callback } from '../../backends/in_memory/types';
+import { Callback } from '../../in_memory/types';
 import constructChunkStringToSign from './constructChunkStringToSign';
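For context on this file: each body chunk of a STREAMING-AWS4-HMAC-SHA256-PAYLOAD upload is authenticated with a chunk string-to-sign chained to the previous chunk's signature. A sketch of the AWS-specified layout (illustrative, not this module's exact code):

    import * as crypto from 'crypto';

    function chunkStringToSign(timestamp: string, credentialScope: string,
        previousSignature: string, chunk: Buffer): string {
        const sha256hex = (d: crypto.BinaryLike) =>
            crypto.createHash('sha256').update(d).digest('hex');
        // AWS4-HMAC-SHA256-PAYLOAD, then scope, then the chain link,
        // then hash of empty string, then hash of this chunk's data
        return ['AWS4-HMAC-SHA256-PAYLOAD', timestamp, credentialScope,
            previousSignature, sha256hex(''), sha256hex(chunk)].join('\n');
    }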

View File

@@ -83,7 +83,7 @@ export type ResultObject = {
 export type CommandPromise = {
     resolve: (results?: ResultObject[]) => void;
     reject: (error: Error) => void;
-    timeout: NodeJS.Timeout | null;
+    timeout: NodeJS.Timer | null;
 };
 export type HandlerCallback = (error: (Error & { code?: number }) | null | undefined, result?: any) => void;
 export type HandlerFunction = (payload: object, uids: string, callback: HandlerCallback) => void;
@@ -254,7 +254,7 @@
     }
     rpcLogger.info('sending command', { toWorkers, toHandler, uids, payload });
     return new Promise((resolve, reject) => {
-        let timeout: NodeJS.Timeout | null = null;
+        let timeout: NodeJS.Timer | null = null;
         if (timeoutMs) {
             timeout = setTimeout(() => {
                 delete uidsToCommandPromise[uids];
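Aside: NodeJS.Timer is the older name for this handle; on current @types/node it survives only as a deprecated alias of NodeJS.Timeout. A version-agnostic way to type it, should this churn again, is to derive the type from setTimeout itself:

    type TimeoutHandle = ReturnType<typeof setTimeout>;
    let pending: TimeoutHandle | null = null;
    pending = setTimeout(() => { pending = null; }, 1000);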

View File

@@ -2,18 +2,18 @@ import * as crypto from 'crypto';
 // The min value here is to manage further backward compat if we
 // need it
-// Default value
-export const vaultGeneratedIamSecurityTokenSizeMin = 128;
-// Safe to assume that a typical token size is less than 8192 bytes
-export const vaultGeneratedIamSecurityTokenSizeMax = 8192;
-// Base-64
-export const vaultGeneratedIamSecurityTokenPattern = /^[A-Za-z0-9/+=]*$/;
+const iamSecurityTokenSizeMin = 128;
+const iamSecurityTokenSizeMax = 128;
+// Security token is an hex string (no real format from amazon)
+const iamSecurityTokenPattern = new RegExp(
+    `^[a-f0-9]{${iamSecurityTokenSizeMin},${iamSecurityTokenSizeMax}}$`,
+);
 // info about the iam security token
 export const iamSecurityToken = {
-    min: vaultGeneratedIamSecurityTokenSizeMin,
-    max: vaultGeneratedIamSecurityTokenSizeMax,
-    pattern: vaultGeneratedIamSecurityTokenPattern,
+    min: iamSecurityTokenSizeMin,
+    max: iamSecurityTokenSizeMax,
+    pattern: iamSecurityTokenPattern,
 };
 // PublicId is used as the canonicalID for a request that contains
 // no authentication information. Requestor can access
@@ -22,7 +22,6 @@ export const publicId = 'http://acs.amazonaws.com/groups/global/AllUsers';
 export const zenkoServiceAccount = 'http://acs.zenko.io/accounts/service';
 export const metadataFileNamespace = '/MDFile';
 export const dataFileURL = '/DataFile';
-export const passthroughFileURL = '/PassthroughFile';
 // AWS states max size for user-defined metadata
 // (x-amz-meta- headers) is 2 KB:
 // http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html
@@ -32,16 +31,7 @@ export const maximumMetaHeadersSize = 2136;
 export const emptyFileMd5 = 'd41d8cd98f00b204e9800998ecf8427e';
 // Version 2 changes the format of the data location property
 // Version 3 adds the dataStoreName attribute
-// Version 4 add the Creation-Time and Content-Language attributes,
-// and add support for x-ms-meta-* headers in UserMetadata
-// Version 5 adds the azureInfo structure
-// Version 6 adds a "deleted" flag that is updated to true before
-// the object gets deleted. This is done to keep object metadata in the
-// oplog when deleting the object, as oplog deletion events don't contain
-// any metadata of the object.
-// version 6 also adds the "isPHD" flag that is used to indicate that the master
-// object is a placeholder and is not up to date.
-export const mdModelVersion = 6;
+export const mdModelVersion = 3;
 /*
  * Splitter is used to build the object name for the overview of a
  * multipart upload and to build the object names for each part of a
@@ -81,45 +71,19 @@ export const mpuBucketPrefix = 'mpuShadowBucket';
 export const permittedCapitalizedBuckets = {
     METADATA: true,
 };
-// Setting a lower object key limit to account for:
-// - Mongo key limit of 1012 bytes
-// - Version ID in Mongo Key if versioned of 33
-// - Max bucket name length if bucket match false of 63
-// - Extra prefix slash for bucket prefix if bucket match of 1
-export const objectKeyByteLimit = 915;
-/* delimiter for location-constraint. The location constraint will be able
- * to include the ingestion flag
- */
-export const zenkoSeparator = ':';
 /* eslint-disable camelcase */
-export const externalBackends = { aws_s3: true, azure: true, gcp: true, pfs: true };
-export const replicationBackends = { aws_s3: true, azure: true, gcp: true };
-// hex digest of sha256 hash of empty string:
-export const emptyStringHash = crypto.createHash('sha256')
-    .update('', 'binary').digest('hex');
-export const mpuMDStoredExternallyBackend = { aws_s3: true, gcp: true };
+export const externalBackends = { aws_s3: true, azure: true, gcp: true, pfs: true }
+export const hasCopyPartBackends = { aws_s3: true, gcp: true }
+export const versioningNotImplBackends = { azure: true, gcp: true }
+export const mpuMDStoredExternallyBackend = { aws_s3: true, gcp: true }
 // AWS sets a minimum size limit for parts except for the last part.
 // http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
 export const minimumAllowedPartSize = 5242880;
-export const gcpMaximumAllowedPartCount = 1024;
-// GCP Object Tagging Prefix
-export const gcpTaggingPrefix = 'aws-tag-';
-export const productName = 'APN/1.0 Scality/1.0 Scality CloudServer for Zenko';
-export const legacyLocations = ['sproxyd', 'legacy'];
-// healthcheck default call from nginx is every 2 seconds
-// for external backends, don't call unless at least 1 minute
-// (60,000 milliseconds) since last call
-export const externalBackendHealthCheckInterval = 60000;
-// some of the available data backends (if called directly rather
-// than through the multiple backend gateway) need a key provided
-// as a string as first parameter of the get/delete methods.
-export const clientsRequireStringKey = { sproxyd: true, cdmi: true };
-export const hasCopyPartBackends = { aws_s3: true, gcp: true };
-export const versioningNotImplBackends = { azure: true, gcp: true };
-// user metadata applied on zenko-created objects
-export const zenkoIDHeader = 'x-amz-meta-zenko-instance-id';
+// hex digest of sha256 hash of empty string:
+export const emptyStringHash = crypto.createHash('sha256').update('', 'binary').digest('hex');
 // Default expiration value of the S3 pre-signed URL duration
 // 604800 seconds (seven days).
+export const legacyLocations = ['sproxyd', 'legacy'];
 export const defaultPreSignedURLExpiry = 7 * 24 * 60 * 60;
 // Regex for ISO-8601 formatted date
 export const shortIso8601Regex = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z/;
@@ -132,21 +96,16 @@ export const supportedNotificationEvents = new Set([
     's3:ObjectRemoved:*',
     's3:ObjectRemoved:Delete',
     's3:ObjectRemoved:DeleteMarkerCreated',
-    's3:Replication:OperationFailedReplication',
     's3:ObjectTagging:*',
     's3:ObjectTagging:Put',
     's3:ObjectTagging:Delete',
     's3:ObjectAcl:Put',
-    's3:ObjectRestore:*',
-    's3:ObjectRestore:Post',
-    's3:ObjectRestore:Completed',
-    's3:ObjectRestore:Delete',
-    's3:LifecycleTransition',
-    's3:LifecycleExpiration:*',
-    's3:LifecycleExpiration:DeleteMarkerCreated',
-    's3:LifecycleExpiration:Delete',
 ]);
 export const notificationArnPrefix = 'arn:scality:bucketnotif';
+// some of the available data backends (if called directly rather
+// than through the multiple backend gateway) need a key provided
+// as a string as first parameter of the get/delete methods.
+export const clientsRequireStringKey = { sproxyd: true, cdmi: true };
 // HTTP server keep-alive timeout is set to a higher value than
 // client's free sockets timeout to avoid the risk of triggering
 // ECONNRESET errors if the server closes the connection at the
@@ -163,14 +122,10 @@ export const supportedLifecycleRules = [
     'expiration',
     'noncurrentVersionExpiration',
     'abortIncompleteMultipartUpload',
-    'transitions',
-    'noncurrentVersionTransition',
 ];
 // Maximum number of buckets to cache (bucket metadata)
 export const maxCachedBuckets = process.env.METADATA_MAX_CACHED_BUCKETS ?
     Number(process.env.METADATA_MAX_CACHED_BUCKETS) : 1000;
-export const validRestoreObjectTiers = new Set(['Expedited', 'Standard', 'Bulk']);
 export const maxBatchingConcurrentOperations = 5;
 /** For policy resource arn check we allow empty account ID to not break compatibility */
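The two token shapes above differ sharply: the incoming (+) side pins security tokens to exactly 128 lowercase-hex characters, while the removed (-) side accepted base-64 material up to 8192 bytes. A quick check of both patterns with illustrative tokens:

    const hexToken = 'ab'.repeat(64); // 128 hex characters
    console.log(new RegExp('^[a-f0-9]{128,128}$').test(hexToken)); // true
    console.log(/^[A-Za-z0-9/+=]*$/.test('QUJDREVG+/='));          // true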

View File

@@ -1042,15 +1042,3 @@ export const AuthMethodNotImplemented: ErrorFormat = {
     description: 'AuthMethodNotImplemented',
     code: 501,
 };
-// --------------------- quotaErros ---------------------
-export const NoSuchQuota: ErrorFormat = {
-    code: 404,
-    description: 'The specified resource does not have a quota.',
-};
-export const QuotaExceeded: ErrorFormat = {
-    code: 429,
-    description: 'The quota set for the resource is exceeded.',
-};

View File

@@ -2,7 +2,7 @@ import type { ServerResponse } from 'http';
 import * as rawErrors from './arsenalErrors';
 /** All possible errors names. */
-export type Name = keyof typeof rawErrors;
+export type Name = keyof typeof rawErrors
 /** Object containing all errors names. It has the format { [Name]: "Name" } */
 export type Names = { [Name_ in Name]: Name_ };
 /** Mapping used to determine an error type. It has the format { [Name]: boolean } */
@@ -13,7 +13,7 @@ export type Errors = { [_ in Name]: ArsenalError };
 // This object is reused constantly through createIs, we store it there
 // to avoid recomputation.
 const isBase = Object.fromEntries(
-    Object.keys(rawErrors).map((key) => [key, false])
+    Object.keys(rawErrors).map(key => [key, false])
 ) as Is;
 // This allows to conditionally add the old behavior of errors to properly
@@ -32,7 +32,7 @@ export const allowUnsafeErrComp = (
 // the Proxy will return false.
 const createIs = (type: Name): Is => {
     const get = (is: Is, value: string | symbol) => is[value] ?? false;
-    const final = Object.freeze({ ...isBase, [type]: true });
+    const final = Object.freeze({ ...isBase, [type]: true })
     return new Proxy(final, { get });
 };
@@ -46,18 +46,13 @@ export class ArsenalError extends Error {
     /** Object used to determine the error type.
      * Example: error.is.InternalError */
     #is: Is;
-    /** A map of error metadata (can be extra fields
-     * that only show in debug mode) */
-    #metadata: Map<string, Object[]>;
-    private constructor(type: Name, code: number, description: string,
-        metadata?: Map<string, Object[]>) {
+    private constructor(type: Name, code: number, description: string) {
         super(type);
         this.#code = code;
         this.#description = description;
         this.#type = type;
         this.#is = createIs(type);
-        this.#metadata = metadata ?? new Map<string, Object[]>();
         // This restores the old behavior of errors, to make sure they're now
         // backward-compatible. Fortunately it's handled by TS, but it cannot
@@ -111,22 +106,7 @@ export class ArsenalError extends Error {
     customizeDescription(description: string): ArsenalError {
         const type = this.#type;
         const code = this.#code;
-        const metadata = new Map(this.#metadata);
-        const err = new ArsenalError(type, code, description, metadata);
-        err.stack = this.stack;
-        return err;
-    }
-    /** Clone the error with a new metadata field */
-    addMetadataEntry(key: string, value: Object[]): ArsenalError {
-        const type = this.#type;
-        const code = this.#code;
-        const description = this.#description;
-        const metadata = new Map(this.#metadata);
-        metadata.set(key, value);
-        const err = new ArsenalError(type, code, description, metadata);
-        err.stack = this.stack;
-        return err;
+        return new ArsenalError(type, code, description);
     }
     /** Used to determine the error type. Example: error.is.InternalError */
@@ -151,14 +131,9 @@ export class ArsenalError extends Error {
         return this.#type;
     }
-    /** A map of error metadata */
-    get metadata() {
-        return this.#metadata;
-    }
     /** Generate all possible errors. An instance is created by default. */
     static errors() {
-        const errors = {};
+        const errors = {}
         Object.entries(rawErrors).forEach((value) => {
             const name = value[0] as Name;
             const error = value[1];
@@ -166,7 +141,7 @@ export class ArsenalError extends Error {
             const get = () => new ArsenalError(name, code, description);
             Object.defineProperty(errors, name, { get });
         });
-        return errors as Errors;
+        return errors as Errors
     }
 }
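Usage sketch for the class above, using only the API visible in this hunk (the error name is illustrative; AccessDenied is referenced elsewhere in this compare, and errors is the object built by ArsenalError.errors()):

    const denied = errors.AccessDenied.customizeDescription('no permission on bucket');
    console.log(denied.is.AccessDenied);   // true, served by the createIs Proxy
    console.log(denied.is.InternalError);  // false: unknown names fall back to false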

View File

@@ -7,8 +7,8 @@
         "test": "mocha --recursive --timeout 5500 tests/unit"
     },
     "dependencies": {
-        "mocha": "5.2.0",
-        "async": "~2.6.1",
+        "mocha": "2.5.3",
+        "async": "^2.6.0",
         "node-forge": "^0.7.1"
     }
 }

View File

@@ -20,32 +20,7 @@ export default class RedisClient {
         return this;
     }
-    /**
-     * scan a pattern and return matching keys
-     * @param pattern - string pattern to match with all existing keys
-     * @param [count=10] - scan count
-     * @param cb - callback (error, result)
-     */
-    scan(pattern: string, count = 10, cb: Callback) {
-        const params = { match: pattern, count };
-        const keys: any[] = [];
-        const stream = this._client.scanStream(params);
-        stream.on('data', resultKeys => {
-            for (let i = 0; i < resultKeys.length; i++) {
-                keys.push(resultKeys[i]);
-            }
-        });
-        stream.on('end', () => {
-            cb(null, keys);
-        });
-    }
-    /** increment value of a key by 1 and set a ttl
-     * @param key - key holding the value
-     * @param expiry - expiry in seconds
-     * @param cb - callback
-     */
+    /** increment value of a key by 1 and set a ttl */
     incrEx(key: string, expiry: number, cb: Callback) {
         const exp = expiry.toString();
         return this._client
@@ -53,22 +28,7 @@
             .exec(cb);
     }
-    /**
-     * increment value of a key by a given amount
-     * @param key - key holding the value
-     * @param amount - amount to increase by
-     * @param cb - callback
-     */
-    incrby(key: string, amount: number, cb: Callback) {
-        return this._client.incrby(key, amount, cb);
-    }
-    /** increment value of a key by a given amount and set a ttl
-     * @param key - key holding the value
-     * @param amount - amount to increase by
-     * @param expiry - expiry in seconds
-     * @param cb - callback
-     */
+    /** increment value of a key by a given amount and set a ttl */
     incrbyEx(key: string, amount: number, expiry: number, cb: Callback) {
         const am = amount.toString();
         const exp = expiry.toString();
@@ -77,29 +37,13 @@
             .exec(cb);
     }
-    /**
-     * decrement value of a key by a given amount
-     * @param key - key holding the value
-     * @param amount - amount to increase by
-     * @param cb - callback
-     */
-    decrby(key: string, amount: number, cb: Callback) {
-        return this._client.decrby(key, amount, cb);
-    }
-    /**
-     * execute a batch of commands
-     * @param cmds - list of commands
-     * @param cb - callback
-     * @return
-     */
+    /** execute a batch of commands */
    batch(cmds: string[][], cb: Callback) {
         return this._client.pipeline(cmds).exec(cb);
     }
     /**
      * Checks if a key exists
-     * @param key - name of key
      * @param cb - callback
      * If cb response returns 0, key does not exist.
      * If cb response returns 1, key exists.
@@ -108,22 +52,10 @@
         return this._client.exists(key, cb);
     }
-    /**
-     * get value stored at key
-     * @param key - key holding the value
-     * @param cb - callback
-     */
-    get(key: string, cb: Callback) {
-        return this._client.get(key, cb);
-    }
     /**
      * Add a value and its score to a sorted set. If no sorted set exists, this
      * will create a new one for the given key.
-     * @param key - name of key
      * @param score - score used to order set
-     * @param value - value to store
-     * @param cb - callback
      */
     zadd(key: string, score: number, value: string, cb: Callback) {
         return this._client.zadd(key, score, value, cb);
@@ -134,8 +66,6 @@
      * Note: using this on a key that does not exist will return 0.
      * Note: using this on an existing key that isn't a sorted set will
      * return an error WRONGTYPE.
-     * @param key - name of key
-     * @param cb - callback
      */
     zcard(key: string, cb: Callback) {
         return this._client.zcard(key, cb);
@@ -146,9 +76,6 @@
      * Note: using this on a key that does not exist will return nil.
      * Note: using this on a value that does not exist in a valid sorted set key
      * will return nil.
-     * @param key - name of key
-     * @param value - value within sorted set
-     * @param cb - callback
      */
     zscore(key: string, value: string, cb: Callback) {
         return this._client.zscore(key, value, cb);
@@ -156,10 +83,8 @@
     /**
      * Remove a value from a sorted set
-     * @param key - name of key
-     * @param value - value within sorted set. Can specify
-     * multiple values within an array
-     * @param cb - callback
+     * @param value - value within sorted set. Can specify multiple values within an array
+     * @param {function} cb - callback
      * The cb response returns number of values removed
      */
     zrem(key: string, value: string | string[], cb: Callback) {
@@ -168,10 +93,8 @@
     /**
      * Get specified range of elements in a sorted set
-     * @param key - name of key
      * @param start - start index (inclusive)
      * @param end - end index (inclusive) (can use -1)
-     * @param cb - callback
      */
     zrange(key: string, start: number, end: number, cb: Callback) {
         return this._client.zrange(key, start, end, cb);
@@ -179,12 +102,10 @@
     /**
      * Get range of elements in a sorted set based off score
-     * @param key - name of key
     * @param min - min score value (inclusive)
      * (can use "-inf")
      * @param max - max score value (inclusive)
      * (can use "+inf")
-     * @param cb - callback
      */
     zrangebyscore(
         key: string,
@@ -195,15 +116,6 @@
         return this._client.zrangebyscore(key, min, max, cb);
     }
-    /**
-     * get TTL or expiration in seconds
-     * @param key - name of key
-     * @param cb - callback
-     */
-    ttl(key: string, cb: Callback) {
-        return this._client.ttl(key, cb);
-    }
     clear(cb: Callback) {
         return this._client.flushdb(cb);
     }
@@ -211,8 +123,4 @@
     disconnect() {
         this._client.disconnect();
     }
-    listClients(cb: Callback) {
-        return this._client.client('list', cb);
-    }
 }
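Usage sketch for batch() above: ioredis pipelines invoke the callback with one [err, result] pair per queued command, which is why callers such as StatsClient index into the result array. Here client stands for an assumed RedisClient instance:

    client.batch([
        ['incr', 'mykey'],
        ['expire', 'mykey', '600'],
    ], (err, res) => {
        // res is shaped like [[null, 1], [null, 1]]
    });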

View File

@@ -2,8 +2,6 @@ import async from 'async';
 import RedisClient from './RedisClient';
 import { Logger } from 'werelogs';
-export type Callback = (error: Error | null, value?: any) => void;
 export default class StatsClient {
     _redis: RedisClient;
     _interval: number;
@@ -50,7 +48,7 @@
      * @param d - Date instance
      * @return key - key for redis
      */
-    buildKey(name: string, d: Date): string {
+    _buildKey(name: string, d: Date): string {
         return `${name}:${this._normalizeTimestamp(d)}`;
     }
@@ -93,33 +91,11 @@
             amount = (typeof incr === 'number') ? incr : 1;
         }
-        const key = this.buildKey(`${id}:requests`, new Date());
+        const key = this._buildKey(`${id}:requests`, new Date());
         return this._redis.incrbyEx(key, amount, this._expiry, callback);
     }
-    /**
-     * Increment the given key by the given value.
-     * @param key - The Redis key to increment
-     * @param incr - The value to increment by
-     * @param [cb] - callback
-     */
-    incrementKey(key: string, incr: number, cb: Callback) {
-        const callback = cb || this._noop;
-        return this._redis.incrby(key, incr, callback);
-    }
-    /**
-     * Decrement the given key by the given value.
-     * @param key - The Redis key to decrement
-     * @param decr - The value to decrement by
-     * @param [cb] - callback
-     */
-    decrementKey(key: string, decr: number, cb: Callback) {
-        const callback = cb || this._noop;
-        return this._redis.decrby(key, decr, callback);
-    }
     /**
      * report/record a request that ended up being a 500 on the server
      * @param id - service identifier
@@ -129,53 +105,10 @@
             return undefined;
         }
         const callback = cb || this._noop;
-        const key = this.buildKey(`${id}:500s`, new Date());
+        const key = this._buildKey(`${id}:500s`, new Date());
         return this._redis.incrEx(key, this._expiry, callback);
     }
-    /**
-     * wrapper on `getStats` that handles a list of keys
-     * @param log - Werelogs request logger
-     * @param ids - service identifiers
-     * @param cb - callback to call with the err/result
-     */
-    getAllStats(log: Logger, ids: string[], cb: Callback) {
-        if (!this._redis) {
-            return cb(null, {});
-        }
-        const statsRes = {
-            'requests': 0,
-            '500s': 0,
-            'sampleDuration': this._expiry,
-        };
-        let requests = 0;
-        let errors = 0;
-        // for now set concurrency to default of 10
-        return async.eachLimit(ids, 10, (id: string, done) => {
-            this.getStats(log, id, (err, res) => {
-                if (err) {
-                    return done(err);
-                }
-                requests += res.requests;
-                errors += res['500s'];
-                return done();
-            });
-        }, error => {
-            if (error) {
-                log.error('error getting stats', {
-                    error,
-                    method: 'StatsClient.getAllStats',
-                });
-                return cb(null, statsRes);
-            }
-            statsRes.requests = requests;
-            statsRes['500s'] = errors;
-            return cb(null, statsRes);
-        });
-    }
     /**
      * get stats for the last x seconds, x being the sampling duration
      * @param log - Werelogs request logger
@@ -190,8 +123,8 @@
         const reqsKeys: ['get', string][] = [];
         const req500sKeys: ['get', string][] = [];
         for (let i = 0; i < totalKeys; i++) {
-            reqsKeys.push(['get', this.buildKey(`${id}:requests`, d)]);
-            req500sKeys.push(['get', this.buildKey(`${id}:500s`, d)]);
+            reqsKeys.push(['get', this._buildKey(`${id}:requests`, d)]);
+            req500sKeys.push(['get', this._buildKey(`${id}:500s`, d)]);
             this._setPrevInterval(d);
         }
         return async.parallel([
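To make the _buildKey/_normalizeTimestamp scheme concrete: with a 300-second interval, a request at 12:34:56 lands in the 12:30:00 bucket, so the Redis key becomes something like 's3:requests:<ms timestamp of 12:30:00>'. The rounding is plain minute arithmetic (run in a UTC environment, since getMinutes is local-time):

    const interval = 300; // seconds, illustrative
    const d = new Date('2024-07-01T12:34:56Z');
    const m = d.getMinutes();
    const bucket = d.setMinutes(m - m % Math.floor(interval / 60), 0, 0);
    console.log(`s3:requests:${bucket}`); // s3:requests:1719837000000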

View File

@ -1,8 +1,4 @@
import StatsClient from './StatsClient'; import StatsClient from './StatsClient';
import { Logger } from 'werelogs';
import async from 'async';
export type Callback = (error: Error | null, value?: any) => void;
/** /**
* @class StatsModel * @class StatsModel
@ -11,145 +7,12 @@ export type Callback = (error: Error | null, value?: any) => void;
* rather than by seconds * rather than by seconds
*/ */
export default class StatsModel extends StatsClient { export default class StatsModel extends StatsClient {
/**
* Utility method to convert 2d array rows to columns, and vice versa
* See also: https://docs.ruby-lang.org/en/2.0.0/Array.html#method-i-zip
* @param arrays - 2d array of integers
* @return converted array
*/
_zip(arrays: number[][]) {
if (arrays.length > 0 && arrays.every(a => Array.isArray(a))) {
return arrays[0].map((_, i) => arrays.map(a => a[i]));
}
return [];
}
/**
* normalize to the nearest interval
* @param d - Date instance
* @return timestamp - normalized to the nearest interval
*/
_normalizeTimestamp(d: Date) {
const m = d.getMinutes();
return d.setMinutes(m - m % (Math.floor(this._interval / 60)), 0, 0);
}
/**
* override the method to get the count as an array of integers separated
* by each interval
* typical input looks like [[null, '1'], [null, '2'], [null, null]...]
* @param arr - each index contains the result of each batch command
* where index 0 signifies the error and index 1 contains the result
* @return array of integers, ordered from most recent interval to
* oldest interval with length of (expiry / interval)
*/
// @ts-expect-errors
_getCount(arr: [any, string | null][]): number[] {
const size = Math.floor(this._expiry / this._interval);
const array = arr.reduce((store, i) => {
let num = parseInt(i[1] ??'', 10);
num = Number.isNaN(num) ? 0 : num;
store.push(num);
return store;
}, [] as number[]);
if (array.length < size) {
array.push(...Array(size - array.length).fill(0));
}
return array;
}
/**
* wrapper on `getStats` that handles a list of keys
* override the method to reduce the returned 2d array from `_getCount`
* @param log - Werelogs request logger
* @param ids - service identifiers
* @param cb - callback to call with the err/result
*/
getAllStats(log: Logger, ids: string[], cb: Callback) {
if (!this._redis) {
return cb(null, {});
}
const size = Math.floor(this._expiry / this._interval);
const statsRes = {
'requests': Array(size).fill(0),
'500s': Array(size).fill(0),
'sampleDuration': this._expiry,
};
const requests: any[] = [];
const errors: any[] = [];
if (ids.length === 0) {
return cb(null, statsRes);
}
// for now set concurrency to default of 10
return async.eachLimit(ids, 10, (id, done) => {
this.getStats(log, id, (err, res) => {
if (err) {
return done(err);
}
requests.push(res.requests);
errors.push(res['500s']);
return done();
});
}, error => {
if (error) {
log.error('error getting stats', {
error,
method: 'StatsModel.getAllStats',
});
return cb(null, statsRes);
}
statsRes.requests = this._zip(requests).map(arr =>
arr.reduce((acc, i) => acc + i, 0));
statsRes['500s'] = this._zip(errors).map(arr =>
arr.reduce((acc, i) => acc + i, 0));
return cb(null, statsRes);
});
}
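The zip-and-sum aggregation at the end of getAllStats, in isolation (the initial value 0 belongs inside reduce):
// Two services, three intervals each: transpose, then sum each column.
const perService = [[1, 2, 0], [3, 0, 5]];
const zipped = perService[0].map((_, i) => perService.map(a => a[i]));
const totals = zipped.map(col => col.reduce((acc, n) => acc + n, 0));
console.log(totals); // [4, 2, 5]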
/**
* Handles getting a list of global keys.
* @param ids - Service identifiers
* @param log - Werelogs request logger
* @param cb - Callback
*/
getAllGlobalStats(ids: string[], log: Logger, cb: Callback) {
const reqsKeys = ids.map(key => (['get', key]));
return this._redis.batch(reqsKeys, (err, res) => {
const statsRes = { requests: 0 };
if (err) {
log.error('error getting metrics', {
error: err,
method: 'StatsClient.getAllGlobalStats',
});
return cb(null, statsRes);
}
statsRes.requests = res.reduce((sum, curr) => {
const [cmdErr, val] = curr;
if (cmdErr) {
// Log any individual request errors from the batch request.
log.error('error getting metrics', {
error: cmdErr,
method: 'StatsClient.getAllGlobalStats',
});
}
return sum + (Number.parseInt(val, 10) || 0);
}, 0);
return cb(null, statsRes);
});
}
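A sketch of the batch-summing logic above: each batch entry is an [error, value] pair, and a per-entry error is logged but still contributes 0 to the total:
type BatchEntry = [Error | null, string | null];
function sumBatch(entries: BatchEntry[]): number {
    return entries.reduce((sum, [cmdErr, val]) => {
        if (cmdErr) {
            // a real caller logs cmdErr here and keeps going
        }
        return sum + (Number.parseInt(val ?? '', 10) || 0);
    }, 0);
}
console.log(sumBatch([[null, '3'], [new Error('oops'), null], [null, '4']])); // 7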
/** /**
* normalize date timestamp to the nearest hour * normalize date timestamp to the nearest hour
* @param d - Date instance * @param d - Date instance
* @return timestamp - normalized to the nearest hour * @return timestamp - normalized to the nearest hour
*/ */
normalizeTimestampByHour(d: Date) { normalizeTimestampByHour(d: Date): number {
return d.setMinutes(0, 0, 0); return d.setMinutes(0, 0, 0);
} }
@ -158,10 +21,40 @@ export default class StatsModel extends StatsClient {
* @param d - Date instance * @param d - Date instance
* @return timestamp - one hour prior to date passed * @return timestamp - one hour prior to date passed
*/ */
_getDatePreviousHour(d: Date) { _getDatePreviousHour(d: Date): number {
return d.setHours(d.getHours() - 1); return d.setHours(d.getHours() - 1);
} }
/**
* normalize to the nearest interval
* @param d - Date instance
* @return timestamp - normalized to the nearest interval
*/
_normalizeTimestamp(d: Date): number {
const m = d.getMinutes();
return d.setMinutes(m - m % (Math.floor(this._interval / 60)), 0, 0);
}
/**
* override the method to get the result as an array of integers separated
* by each interval
* typical input looks like [[null, '1'], [null, '2'], [null, null]...]
* @param arr - each index contains the result of each batch command
* where index 0 signifies the error and index 1 contains the result
* @return array of integers, ordered from most recent interval to
* oldest interval
*/
// @ts-ignore
// TODO change name or conform to parent class method
_getCount(arr: [any, string | null][]) {
return arr.reduce<number[]>((store, i) => {
let num = parseInt(i[1] ?? '', 10);
num = Number.isNaN(num) ? 0 : num;
store.push(num);
return store;
}, []);
}
/** /**
* get list of sorted set key timestamps * get list of sorted set key timestamps
* @param epoch - epoch time * @param epoch - epoch time

View File

@ -1,281 +0,0 @@
export type DeleteRetentionPolicy = {
enabled: boolean;
days: number;
};
/**
* Helper class to ease access to the Azure specific information for
* storage accounts mapped to buckets.
*/
export default class BucketAzureInfo {
_data: {
sku: string;
accessTier: string;
kind: string;
systemKeys: string[];
tenantKeys: string[];
subscriptionId: string;
resourceGroup: string;
deleteRetentionPolicy: DeleteRetentionPolicy;
managementPolicies: any[];
httpsOnly: boolean;
tags: any;
networkACL: any[];
cname: string;
azureFilesAADIntegration: boolean;
hnsEnabled: boolean;
logging: any;
hourMetrics: any;
minuteMetrics: any;
serviceVersion: string;
}
/**
* @constructor
* @param obj - Raw structure for the Azure info on storage account
* @param obj.sku - SKU name of this storage account
* @param obj.accessTier - Access Tier name of this storage account
* @param obj.kind - Kind name of this storage account
* @param obj.systemKeys - pair of shared keys for the system
* @param obj.tenantKeys - pair of shared keys for the tenant
* @param obj.subscriptionId - subscription ID the storage account
* belongs to
* @param obj.resourceGroup - Resource group name the storage
* account belongs to
* @param obj.deleteRetentionPolicy - Delete retention policy
* @param obj.deleteRetentionPolicy.enabled -
* @param obj.deleteRetentionPolicy.days -
* @param obj.managementPolicies - Management policies for this
* storage account
* @param obj.httpsOnly - Serve the content of this storage
* account through HTTPS only
* @param obj.tags - Set of tags applied on this storage account
* @param obj.networkACL - Network ACL of this storage account
* @param obj.cname - CNAME of this storage account
* @param obj.azureFilesAADIntegration - whether or not Azure
* Files AAD Integration is enabled for this storage account
* @param obj.hnsEnabled - whether or not a hierarchical namespace
* is enabled for this storage account
* @param obj.logging - service properties: logging
* @param obj.hourMetrics - service properties: hourMetrics
* @param obj.minuteMetrics - service properties: minuteMetrics
* @param obj.serviceVersion - service properties: serviceVersion
*/
constructor(obj: {
sku: string;
accessTier: string;
kind: string;
systemKeys: string[];
tenantKeys: string[];
subscriptionId: string;
resourceGroup: string;
deleteRetentionPolicy: DeleteRetentionPolicy;
managementPolicies: any[];
httpsOnly: boolean;
tags: any;
networkACL: any[];
cname: string;
azureFilesAADIntegration: boolean;
hnsEnabled: boolean;
logging: any;
hourMetrics: any;
minuteMetrics: any;
serviceVersion: string;
}) {
this._data = {
sku: obj.sku,
accessTier: obj.accessTier,
kind: obj.kind,
systemKeys: obj.systemKeys,
tenantKeys: obj.tenantKeys,
subscriptionId: obj.subscriptionId,
resourceGroup: obj.resourceGroup,
deleteRetentionPolicy: obj.deleteRetentionPolicy,
managementPolicies: obj.managementPolicies,
httpsOnly: obj.httpsOnly,
tags: obj.tags,
networkACL: obj.networkACL,
cname: obj.cname,
azureFilesAADIntegration: obj.azureFilesAADIntegration,
hnsEnabled: obj.hnsEnabled,
logging: obj.logging,
hourMetrics: obj.hourMetrics,
minuteMetrics: obj.minuteMetrics,
serviceVersion: obj.serviceVersion,
};
}
getSku() {
return this._data.sku;
}
setSku(sku: string) {
this._data.sku = sku;
return this;
}
getAccessTier() {
return this._data.accessTier;
}
setAccessTier(accessTier: string) {
this._data.accessTier = accessTier;
return this;
}
getKind() {
return this._data.kind;
}
setKind(kind: string) {
this._data.kind = kind;
return this;
}
getSystemKeys() {
return this._data.systemKeys;
}
setSystemKeys(systemKeys: string[]) {
this._data.systemKeys = systemKeys;
return this;
}
getTenantKeys() {
return this._data.tenantKeys;
}
setTenantKeys(tenantKeys: string[]) {
this._data.tenantKeys = tenantKeys;
return this;
}
getSubscriptionId() {
return this._data.subscriptionId;
}
setSubscriptionId(subscriptionId: string) {
this._data.subscriptionId = subscriptionId;
return this;
}
getResourceGroup() {
return this._data.resourceGroup;
}
setResourceGroup(resourceGroup: string) {
this._data.resourceGroup = resourceGroup;
return this;
}
getDeleteRetentionPolicy() {
return this._data.deleteRetentionPolicy;
}
setDeleteRetentionPolicy(deleteRetentionPolicy: DeleteRetentionPolicy) {
this._data.deleteRetentionPolicy = deleteRetentionPolicy;
return this;
}
getManagementPolicies() {
return this._data.managementPolicies;
}
setManagementPolicies(managementPolicies: any[]) {
this._data.managementPolicies = managementPolicies;
return this;
}
getHttpsOnly() {
return this._data.httpsOnly;
}
setHttpsOnly(httpsOnly: boolean) {
this._data.httpsOnly = httpsOnly;
return this;
}
getTags() {
return this._data.tags;
}
setTags(tags: any) {
this._data.tags = tags;
return this;
}
getNetworkACL() {
return this._data.networkACL;
}
setNetworkACL(networkACL: any[]) {
this._data.networkACL = networkACL;
return this;
}
getCname() {
return this._data.cname;
}
setCname(cname: string) {
this._data.cname = cname;
return this;
}
getAzureFilesAADIntegration() {
return this._data.azureFilesAADIntegration;
}
setAzureFilesAADIntegration(azureFilesAADIntegration: boolean) {
this._data.azureFilesAADIntegration = azureFilesAADIntegration;
return this;
}
getHnsEnabled() {
return this._data.hnsEnabled;
}
setHnsEnabled(hnsEnabled: boolean) {
this._data.hnsEnabled = hnsEnabled;
return this;
}
getLogging() {
return this._data.logging;
}
setLogging(logging: any) {
this._data.logging = logging;
return this;
}
getHourMetrics() {
return this._data.hourMetrics;
}
setHourMetrics(hourMetrics: any) {
this._data.hourMetrics = hourMetrics;
return this;
}
getMinuteMetrics() {
return this._data.minuteMetrics;
}
setMinuteMetrics(minuteMetrics: any) {
this._data.minuteMetrics = minuteMetrics;
return this;
}
getServiceVersion() {
return this._data.serviceVersion;
}
setServiceVersion(serviceVersion: any) {
this._data.serviceVersion = serviceVersion;
return this;
}
getValue() {
return this._data;
}
}
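An illustrative construction of the class above; every value is made up, and the setters chain since each returns this:
const info = new BucketAzureInfo({
    sku: 'Standard_LRS', accessTier: 'Hot', kind: 'StorageV2',
    systemKeys: ['sk1', 'sk2'], tenantKeys: ['tk1', 'tk2'],
    subscriptionId: 'sub-0000', resourceGroup: 'rg-example',
    deleteRetentionPolicy: { enabled: true, days: 7 },
    managementPolicies: [], httpsOnly: true, tags: {}, networkACL: [],
    cname: 'example', azureFilesAADIntegration: false, hnsEnabled: false,
    logging: {}, hourMetrics: {}, minuteMetrics: {},
    serviceVersion: '2019-07-07',
});
info.setAccessTier('Cool').setHttpsOnly(false); // setters chain
console.log(info.getValue().accessTier); // 'Cool'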

View File

@ -8,12 +8,10 @@ import ObjectLockConfiguration from './ObjectLockConfiguration';
import BucketPolicy from './BucketPolicy'; import BucketPolicy from './BucketPolicy';
import NotificationConfiguration from './NotificationConfiguration'; import NotificationConfiguration from './NotificationConfiguration';
import { ACL as OACL } from './ObjectMD'; import { ACL as OACL } from './ObjectMD';
import { areTagsValid, BucketTag } from '../s3middleware/tagging';
// WHEN UPDATING THIS NUMBER, UPDATE BucketInfoModelVersion.md CHANGELOG // WHEN UPDATING THIS NUMBER, UPDATE BucketInfoModelVersion.md CHANGELOG
// BucketInfoModelVersion.md can be found in documentation/ at the root // BucketInfoModelVersion.md can be found in the root of this repository
// of this repository const modelVersion = 10;
const modelVersion = 16;
export type CORS = { export type CORS = {
id: string; id: string;
@ -37,41 +35,6 @@ export type VersioningConfiguration = {
MfaDelete: any; MfaDelete: any;
}; };
export type VeeamSOSApi = {
SystemInfo?: {
ProtocolVersion: string,
ModelName: string,
ProtocolCapabilities: {
CapacityInfo: boolean,
UploadSessions: boolean,
IAMSTS?: boolean,
},
APIEndpoints?: {
IAMEndpoint: string,
STSEndpoint: string,
},
SystemRecommendations?: {
S3ConcurrentTaskLimit: number,
S3MultiObjectDelete: number,
StorageCurrentTasksLimit: number,
KbBlockSize: number,
}
LastModified?: string,
},
CapacityInfo?: {
Capacity: number,
Available: number,
Used: number,
LastModified?: string,
},
};
// Capabilities contains all specifics from external products supported by
// our S3 implementation, at bucket level
export type Capabilities = {
VeeamSOSApi?: VeeamSOSApi,
};
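An example value for the Capabilities shape above (figures are illustrative):
const capabilities: Capabilities = {
    VeeamSOSApi: {
        SystemInfo: {
            ProtocolVersion: '1.0',
            ModelName: 'Example-S3',
            ProtocolCapabilities: { CapacityInfo: true, UploadSessions: false },
        },
        CapacityInfo: { Capacity: 1000, Available: 400, Used: 600 },
    },
};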
export type ACL = OACL & { WRITE: string[] } export type ACL = OACL & { WRITE: string[] }
export default class BucketInfo { export default class BucketInfo {
@ -95,70 +58,56 @@ export default class BucketInfo {
_objectLockEnabled?: boolean; _objectLockEnabled?: boolean;
_objectLockConfiguration?: any; _objectLockConfiguration?: any;
_notificationConfiguration?: any; _notificationConfiguration?: any;
_tags?: Array<BucketTag>; _tags?: { key: string; value: string }[] | null;
_readLocationConstraint: string | null;
_isNFS: boolean | null;
_azureInfo: any | null;
_ingestion: { status: 'enabled' | 'disabled' } | null;
_capabilities?: Capabilities;
_quotaMax: number | 0;
/** /**
* Represents all bucket information. * Represents all bucket information.
* @constructor * @constructor
* @param name - bucket name * @param {string} name - bucket name
* @param owner - bucket owner's name * @param {string} owner - bucket owner's name
* @param ownerDisplayName - owner's display name * @param {string} ownerDisplayName - owner's display name
* @param creationDate - creation date of bucket * @param {object} creationDate - creation date of bucket
* @param mdBucketModelVersion - bucket model version * @param {number} mdBucketModelVersion - bucket model version
* @param [acl] - bucket ACLs (no need to copy * @param {object} [acl] - bucket ACLs (no need to copy
* ACL object since referenced object will not be used outside of * ACL object since referenced object will not be used outside of
* BucketInfo instance) * BucketInfo instance)
* @param transient - flag indicating whether bucket is transient * @param {boolean} transient - flag indicating whether bucket is transient
* @param deleted - flag indicating whether attempt to delete * @param {boolean} deleted - flag indicating whether attempt to delete
* @param serverSideEncryption - sse information for this bucket * @param {object} serverSideEncryption - sse information for this bucket
* @param serverSideEncryption.cryptoScheme - * @param {number} serverSideEncryption.cryptoScheme -
* cryptoScheme used * cryptoScheme used
* @param serverSideEncryption.algorithm - * @param {string} serverSideEncryption.algorithm -
* algorithm to use * algorithm to use
* @param serverSideEncryption.masterKeyId - * @param {string} serverSideEncryption.masterKeyId -
* key to get master key * key to get master key
* @param serverSideEncryption.configuredMasterKeyId - * @param {string} serverSideEncryption.configuredMasterKeyId -
* custom KMS key id specified by user * custom KMS key id specified by user
* @param serverSideEncryption.mandatory - * @param {boolean} serverSideEncryption.mandatory -
* true for mandatory encryption * true for mandatory encryption
* bucket has been made * bucket has been made
* @param versioningConfiguration - versioning configuration * @param {object} versioningConfiguration - versioning configuration
* @param versioningConfiguration.Status - versioning status * @param {string} versioningConfiguration.Status - versioning status
* @param versioningConfiguration.MfaDelete - versioning mfa delete * @param {object} versioningConfiguration.MfaDelete - versioning mfa delete
* @param locationConstraint - locationConstraint for bucket that * @param {string} locationConstraint - locationConstraint for bucket
* also includes the ingestion flag * @param {WebsiteConfiguration} [websiteConfiguration] - website
* @param [websiteConfiguration] - website
* configuration * configuration
* @param [cors] - collection of CORS rules to apply * @param {object[]} [cors] - collection of CORS rules to apply
* @param [cors[].id] - optional ID to identify rule * @param {string} [cors[].id] - optional ID to identify rule
* @param cors[].allowedMethods - methods allowed for CORS request * @param {string[]} cors[].allowedMethods - methods allowed for CORS request
* @param cors[].allowedOrigins - origins allowed for CORS request * @param {string[]} cors[].allowedOrigins - origins allowed for CORS request
* @param [cors[].allowedHeaders] - headers allowed in an OPTIONS * @param {string[]} [cors[].allowedHeaders] - headers allowed in an OPTIONS
* request via the Access-Control-Request-Headers header * request via the Access-Control-Request-Headers header
* @param [cors[].maxAgeSeconds] - seconds browsers should cache * @param {number} [cors[].maxAgeSeconds] - seconds browsers should cache
* OPTIONS response * OPTIONS response
* @param [cors[].exposeHeaders] - headers exposed to applications * @param {string[]} [cors[].exposeHeaders] - headers exposed to applications
* @param [replicationConfiguration] - replication configuration * @param {object} [replicationConfiguration] - replication configuration
* @param [lifecycleConfiguration] - lifecycle configuration * @param {object} [lifecycleConfiguration] - lifecycle configuration
* @param [bucketPolicy] - bucket policy * @param {object} [bucketPolicy] - bucket policy
* @param [uid] - unique identifier for the bucket, necessary * @param {string} [uid] - unique identifier for the bucket, necessary
* @param readLocationConstraint - readLocationConstraint for bucket * @param {boolean} [objectLockEnabled] - true when object lock enabled
* addition for use with lifecycle operations * @param {object} [objectLockConfiguration] - object lock configuration
* @param [isNFS] - whether the bucket is on NFS * @param {object} [notificationConfiguration] - bucket notification configuration
* @param [ingestionConfig] - object for ingestion status: en/dis * @param {object[]} [tags] - bucket tags
* @param [azureInfo] - Azure storage account specific info
* @param [objectLockEnabled] - true when object lock enabled
* @param [objectLockConfiguration] - object lock configuration
* @param [notificationConfiguration] - bucket notification configuration
* @param [tags] - bucket tag set
* @param [capabilities] - capabilities for the bucket
* @param quotaMax - bucket quota
*/ */
constructor( constructor(
name: string, name: string,
@ -178,16 +127,10 @@ export default class BucketInfo {
lifecycleConfiguration?: any, lifecycleConfiguration?: any,
bucketPolicy?: any, bucketPolicy?: any,
uid?: string, uid?: string,
readLocationConstraint?: string,
isNFS?: boolean,
ingestionConfig?: { status: 'enabled' | 'disabled' },
azureInfo?: any,
objectLockEnabled?: boolean, objectLockEnabled?: boolean,
objectLockConfiguration?: any, objectLockConfiguration?: any,
notificationConfiguration?: any, notificationConfiguration?: any,
tags?: Array<BucketTag> | [], tags?: { key: string; value: string }[],
capabilities?: Capabilities,
quotaMax?: number | 0,
) { ) {
assert.strictEqual(typeof name, 'string'); assert.strictEqual(typeof name, 'string');
assert.strictEqual(typeof owner, 'string'); assert.strictEqual(typeof owner, 'string');
@ -229,15 +172,6 @@ export default class BucketInfo {
if (locationConstraint) { if (locationConstraint) {
assert.strictEqual(typeof locationConstraint, 'string'); assert.strictEqual(typeof locationConstraint, 'string');
} }
if (ingestionConfig) {
assert.strictEqual(typeof ingestionConfig, 'object');
}
if (azureInfo) {
assert.strictEqual(typeof azureInfo, 'object');
}
if (readLocationConstraint) {
assert.strictEqual(typeof readLocationConstraint, 'string');
}
if (websiteConfiguration) { if (websiteConfiguration) {
assert(websiteConfiguration instanceof WebsiteConfiguration); assert(websiteConfiguration instanceof WebsiteConfiguration);
const indexDocument = websiteConfiguration.getIndexDocument(); const indexDocument = websiteConfiguration.getIndexDocument();
@ -283,14 +217,8 @@ export default class BucketInfo {
READ: [], READ: [],
READ_ACP: [], READ_ACP: [],
}; };
if (tags) {
if (tags === undefined) { assert(Array.isArray(tags));
tags = [] as BucketTag[];
}
assert.strictEqual(areTagsValid(tags), true);
if (quotaMax) {
assert.strictEqual(typeof quotaMax, 'number');
assert(quotaMax >= 0, 'Quota cannot be negative');
} }
// IF UPDATING PROPERTIES, INCREMENT MODELVERSION NUMBER ABOVE // IF UPDATING PROPERTIES, INCREMENT MODELVERSION NUMBER ABOVE
@ -305,22 +233,16 @@ export default class BucketInfo {
this._serverSideEncryption = serverSideEncryption || null; this._serverSideEncryption = serverSideEncryption || null;
this._versioningConfiguration = versioningConfiguration || null; this._versioningConfiguration = versioningConfiguration || null;
this._locationConstraint = locationConstraint || null; this._locationConstraint = locationConstraint || null;
this._readLocationConstraint = readLocationConstraint || null;
this._websiteConfiguration = websiteConfiguration || null; this._websiteConfiguration = websiteConfiguration || null;
this._replicationConfiguration = replicationConfiguration || null; this._replicationConfiguration = replicationConfiguration || null;
this._cors = cors || null; this._cors = cors || null;
this._lifecycleConfiguration = lifecycleConfiguration || null; this._lifecycleConfiguration = lifecycleConfiguration || null;
this._bucketPolicy = bucketPolicy || null; this._bucketPolicy = bucketPolicy || null;
this._uid = uid || uuid(); this._uid = uid || uuid();
this._isNFS = isNFS || null;
this._ingestion = ingestionConfig || null;
this._azureInfo = azureInfo || null;
this._objectLockEnabled = objectLockEnabled || false; this._objectLockEnabled = objectLockEnabled || false;
this._objectLockConfiguration = objectLockConfiguration || null; this._objectLockConfiguration = objectLockConfiguration || null;
this._notificationConfiguration = notificationConfiguration || null; this._notificationConfiguration = notificationConfiguration || null;
this._tags = tags; this._tags = tags || null;
this._capabilities = capabilities || undefined;
this._quotaMax = quotaMax || 0;
return this; return this;
} }
@ -341,22 +263,16 @@ export default class BucketInfo {
serverSideEncryption: this._serverSideEncryption, serverSideEncryption: this._serverSideEncryption,
versioningConfiguration: this._versioningConfiguration, versioningConfiguration: this._versioningConfiguration,
locationConstraint: this._locationConstraint, locationConstraint: this._locationConstraint,
readLocationConstraint: this._readLocationConstraint,
websiteConfiguration: undefined, websiteConfiguration: undefined,
cors: this._cors, cors: this._cors,
replicationConfiguration: this._replicationConfiguration, replicationConfiguration: this._replicationConfiguration,
lifecycleConfiguration: this._lifecycleConfiguration, lifecycleConfiguration: this._lifecycleConfiguration,
bucketPolicy: this._bucketPolicy, bucketPolicy: this._bucketPolicy,
uid: this._uid, uid: this._uid,
isNFS: this._isNFS,
ingestion: this._ingestion,
azureInfo: this._azureInfo,
objectLockEnabled: this._objectLockEnabled, objectLockEnabled: this._objectLockEnabled,
objectLockConfiguration: this._objectLockConfiguration, objectLockConfiguration: this._objectLockConfiguration,
notificationConfiguration: this._notificationConfiguration, notificationConfiguration: this._notificationConfiguration,
tags: this._tags, tags: this._tags,
capabilities: this._capabilities,
quotaMax: this._quotaMax,
}; };
const final = this._websiteConfiguration const final = this._websiteConfiguration
? { ? {
@ -380,10 +296,8 @@ export default class BucketInfo {
obj.transient, obj.deleted, obj.serverSideEncryption, obj.transient, obj.deleted, obj.serverSideEncryption,
obj.versioningConfiguration, obj.locationConstraint, websiteConfig, obj.versioningConfiguration, obj.locationConstraint, websiteConfig,
obj.cors, obj.replicationConfiguration, obj.lifecycleConfiguration, obj.cors, obj.replicationConfiguration, obj.lifecycleConfiguration,
obj.bucketPolicy, obj.uid, obj.readLocationConstraint, obj.isNFS, obj.bucketPolicy, obj.uid, obj.objectLockEnabled,
obj.ingestion, obj.azureInfo, obj.objectLockEnabled, obj.objectLockConfiguration, obj.notificationConfiguration, obj.tags);
obj.objectLockConfiguration, obj.notificationConfiguration, obj.tags,
obj.capabilities, obj.quotaMax);
} }
/** /**
@ -407,11 +321,8 @@ export default class BucketInfo {
data._versioningConfiguration, data._locationConstraint, data._versioningConfiguration, data._locationConstraint,
data._websiteConfiguration, data._cors, data._websiteConfiguration, data._cors,
data._replicationConfiguration, data._lifecycleConfiguration, data._replicationConfiguration, data._lifecycleConfiguration,
data._bucketPolicy, data._uid, data._readLocationConstraint, data._bucketPolicy, data._uid, data._objectLockEnabled,
data._isNFS, data._ingestion, data._azureInfo, data._objectLockConfiguration, data._notificationConfiguration, data._tags);
data._objectLockEnabled, data._objectLockConfiguration,
data._notificationConfiguration, data._tags, data._capabilities,
data._quotaMax);
} }
/** /**
@ -708,17 +619,6 @@ export default class BucketInfo {
return this._locationConstraint; return this._locationConstraint;
} }
/**
* Get read location constraint.
* @return - bucket read location constraint
*/
getReadLocationConstraint() {
if (this._readLocationConstraint) {
return this._readLocationConstraint;
}
return this._locationConstraint;
}
/** /**
* Set Bucket model version * Set Bucket model version
* *
@ -807,85 +707,6 @@ export default class BucketInfo {
this._uid = uid; this._uid = uid;
return this; return this;
} }
/**
* Check if the bucket is an NFS bucket.
* @return - Whether the bucket is NFS or not
*/
isNFS() {
return this._isNFS;
}
/**
* Set whether the bucket is an NFS bucket.
* @param isNFS - Whether the bucket is NFS or not
* @return - bucket info instance
*/
setIsNFS(isNFS: boolean) {
this._isNFS = isNFS;
return this;
}
/**
* enable ingestion, set 'this._ingestion' to { status: 'enabled' }
* @return - bucket info instance
*/
enableIngestion() {
this._ingestion = { status: 'enabled' };
return this;
}
/**
* disable ingestion, set 'this._ingestion' to { status: 'disabled' }
* @return - bucket info instance
*/
disableIngestion() {
this._ingestion = { status: 'disabled' };
return this;
}
/**
* Get ingestion configuration
* @return - bucket ingestion configuration: Enabled or Disabled
*/
getIngestion() {
return this._ingestion;
}
/**
* Check if bucket is an ingestion bucket
* @return - 'true' if bucket is an ingestion bucket, 'false'
* otherwise
*/
isIngestionBucket() {
const ingestionConfig = this.getIngestion();
if (ingestionConfig) {
return true;
}
return false;
}
/**
* Check if ingestion is enabled
* @return - 'true' if ingestion is enabled, otherwise 'false'
*/
isIngestionEnabled() {
const ingestionConfig = this.getIngestion();
return ingestionConfig ? ingestionConfig.status === 'enabled' : false;
}
/**
* Return the Azure specific storage account information for this bucket
* @return - a structure suitable for {@link BucketAzureInfo}
* constructor
*/
getAzureInfo() {
return this._azureInfo;
}
/**
* Set the Azure specific storage account information for this bucket
* @param azureInfo - a structure suitable for
* {@link BucketAzureInfo} construction
* @return - bucket info instance
*/
setAzureInfo(azureInfo: any) {
this._azureInfo = azureInfo;
return this;
}
/** /**
* Check if object lock is enabled. * Check if object lock is enabled.
* @return - depending on whether object lock is enabled * @return - depending on whether object lock is enabled
@ -905,7 +726,7 @@ export default class BucketInfo {
/** /**
* Get the value of bucket tags * Get the value of bucket tags
* @return - Array of bucket tags * @return - Array of bucket tags as {"key" : "key", "value": "value"}
*/ */
getTags() { getTags() {
return this._tags; return this._tags;
@ -913,58 +734,13 @@ export default class BucketInfo {
/** /**
* Set bucket tags * Set bucket tags
* @param tags - collection of tags
* @param tags[].key - key of the tag
* @param tags[].value - value of the tag
* @return - bucket info instance * @return - bucket info instance
*/ */
setTags(tags: Array<BucketTag>) { setTags(tags: { key: string; value: string }[]) {
this._tags = tags; this._tags = tags;
return this; return this;
} }
/**
* Get the value of bucket capabilities
* @return - capabilities of the bucket
*/
getCapabilities() {
return this._capabilities;
}
/**
* Get a specific bucket capability
*
* @param capability? - if provided, will return a specific capability
* @return - capability of the bucket
*/
getCapability(capability: string) : VeeamSOSApi | undefined {
if (capability && this._capabilities && this._capabilities[capability]) {
return this._capabilities[capability];
}
return undefined;
}
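A hypothetical lookup against a bucket carrying the Veeam capability (the bucket instance is assumed to exist):
declare const bucket: BucketInfo;
const sos = bucket.getCapability('VeeamSOSApi');
if (sos?.CapacityInfo) {
    console.log(sos.CapacityInfo.Available); // remaining capacity, if reported
}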
/**
* Set bucket capabilities
* @return - bucket info instance
*/
setCapabilities(capabilities: Capabilities) {
this._capabilities = capabilities;
return this;
}
/**
* Get the bucket quota information
* @return quotaMax
*/
getQuota() {
return this._quotaMax;
}
/**
* Set bucket quota
* @param quota - quota to be set
* @return - bucket info instance
*/
setQuota(quota: number) {
this._quotaMax = quota || 0;
return this;
}
} }

View File

@ -7,8 +7,6 @@ import escapeForXml from '../s3middleware/escapeForXml';
import type { XMLRule } from './ReplicationConfiguration'; import type { XMLRule } from './ReplicationConfiguration';
import { Status } from './LifecycleRule'; import { Status } from './LifecycleRule';
const MAX_DAYS = 2147483647; // Max 32-bit signed binary integer.
/** /**
* Format of xml request: * Format of xml request:
@ -89,7 +87,6 @@ export default class LifecycleConfiguration {
_parsedXML: any; _parsedXML: any;
_ruleIDs: string[]; _ruleIDs: string[];
_tagKeys: string[]; _tagKeys: string[];
_storageClasses: string[];
_config: { _config: {
error?: ArsenalError; error?: ArsenalError;
rules?: any[]; rules?: any[];
@ -98,13 +95,10 @@ export default class LifecycleConfiguration {
/** /**
* Create a Lifecycle Configuration instance * Create a Lifecycle Configuration instance
* @param xml - the parsed xml * @param xml - the parsed xml
* @param config - the CloudServer config
* @return - LifecycleConfiguration instance * @return - LifecycleConfiguration instance
*/ */
constructor(xml: any, config: { replicationEndpoints: { site: string }[] }) { constructor(xml: any) {
this._parsedXML = xml; this._parsedXML = xml;
this._storageClasses =
config.replicationEndpoints.map(endpoint => endpoint.site);
this._ruleIDs = []; this._ruleIDs = [];
this._tagKeys = []; this._tagKeys = [];
this._config = {}; this._config = {};
@ -225,6 +219,11 @@ export default class LifecycleConfiguration {
* } * }
*/ */
_parseRule(rule: XMLRule) { _parseRule(rule: XMLRule) {
if (rule.Transition || rule.NoncurrentVersionTransition) {
const msg = 'Transition lifecycle action not yet implemented';
const error = errors.NotImplemented.customizeDescription(msg);
return { error };
}
// Either Prefix or Filter must be included, but can be empty string // Either Prefix or Filter must be included, but can be empty string
if ((!rule.Filter && rule.Filter !== '') && if ((!rule.Filter && rule.Filter !== '') &&
(!rule.Prefix && rule.Prefix !== '')) { (!rule.Prefix && rule.Prefix !== '')) {
@ -493,172 +492,6 @@ export default class LifecycleConfiguration {
return { ...base, ruleStatus: status } return { ...base, ruleStatus: status }
} }
/**
* Finds the prefix and/or tags of the given rule and gets the error message
* @param rule - The rule to find the prefix in
* @return - The prefix of filter information
*/
_getRuleFilterDesc(rule: { Prefix?: string[]; Filter?: any[] }) {
if (rule.Prefix) {
return `prefix '${rule.Prefix[0]}'`;
}
// There must be a filter if no top-level prefix is provided. First
// check if there are multiple filters (i.e. `Filter.And`).
if (rule.Filter?.[0] === undefined || rule.Filter[0].And === undefined) {
const { Prefix, Tag } = rule.Filter?.[0] || {};
if (Prefix) {
return `filter '(prefix=${Prefix[0]})'`;
}
if (Tag) {
const { Key, Value } = Tag[0];
return `filter '(tag: key=${Key[0]}, value=${Value[0]})'`;
}
return 'filter (all)';
}
const filters: string[] = [];
const { Prefix, Tag } = rule.Filter[0].And[0];
if (Prefix) {
filters.push(`prefix=${Prefix[0]}`);
}
Tag.forEach((tag: { Key: string[]; Value: string[] }) => {
const { Key, Value } = tag;
filters.push(`tag: key=${Key[0]}, value=${Value[0]}`);
});
const joinedFilters = filters.join(' and ');
return `filter '(${joinedFilters})'`;
}
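A sketch of the descriptions this produces, instantiating the class directly and calling the private helper; the rule shapes follow the xml2js-style arrays used throughout this file:
const lc = new LifecycleConfiguration({}, {
    replicationEndpoints: [{ site: 'us-east-2' }],
});
lc._getRuleFilterDesc({ Prefix: ['logs/'] });
// => "prefix 'logs/'"
lc._getRuleFilterDesc({ Filter: [{ Tag: [{ Key: ['k'], Value: ['v'] }] }] });
// => "filter '(tag: key=k, value=v)'"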
/**
* Checks the validity of the given field
* @param params - Given function parameters
* @param params.days - The value of the field to check
* @param params.field - The field name with the value
* @param params.ancestor - The immediate ancestor field
* @return Returns an error object or `null`
*/
_checkDays(params: { days: number; field: string; ancestor: string }) {
const { days, field, ancestor } = params;
if (days < 0) {
const msg = `'${field}' in ${ancestor} action must be nonnegative`;
return errors.InvalidArgument.customizeDescription(msg);
}
if (days > MAX_DAYS) {
return errors.MalformedXML.customizeDescription(
`'${field}' in ${ancestor} action must not exceed ${MAX_DAYS}`);
}
return null;
}
/**
* Checks the validity of the given storage class
* @param params - Given function parameters
* @param params.usedStorageClasses - Storage classes used in other
* rules
* @param params.storageClass - The storage class of the current
* rule
* @param params.ancestor - The immediate ancestor field
* @param params.prefix - The prefix of the rule
* @return Returns an error object or `null`
*/
_checkStorageClasses(params: {
usedStorageClasses: string[];
storageClass: string;
ancestor: string;
rule: { Prefix?: string[]; Filter?: any };
}) {
const { usedStorageClasses, storageClass, ancestor, rule } = params;
if (!this._storageClasses.includes(storageClass)) {
// This differs from the AWS message. This will help the user since
// the StorageClass does not conform to AWS specs.
const list = `'${this._storageClasses.join("', '")}'`;
const msg = `'StorageClass' must be one of ${list}`;
return errors.MalformedXML.customizeDescription(msg);
}
if (usedStorageClasses.includes(storageClass)) {
const msg = `'StorageClass' must be different for '${ancestor}' ` +
`actions in same 'Rule' with ${this._getRuleFilterDesc(rule)}`;
return errors.InvalidRequest.customizeDescription(msg);
}
return null;
}
/**
* Ensure that transition rules are at least a day apart from each other.
* @param params - Given function parameters
* @param [params.days] - The days of the current transition
* @param [params.date] - The date of the current transition
* @param params.storageClass - The storage class of the current
* rule
* @param params.rule - The current rule
*/
_checkTimeGap(params: {
days?: number;
date?: string;
storageClass: string;
rule: { Transition: any[]; Prefix?: string[]; Filter?: any };
}) {
const { days, date, storageClass, rule } = params;
const invalidTransition = rule.Transition.find(transition => {
if (storageClass === transition.StorageClass[0]) {
return false;
}
if (days !== undefined) {
return Number.parseInt(transition.Days[0], 10) === days;
}
if (date !== undefined) {
const timestamp = new Date(date).getTime();
const compareTimestamp = new Date(transition.Date[0]).getTime();
const oneDay = 24 * 60 * 60 * 1000; // Milliseconds in a day.
return Math.abs(timestamp - compareTimestamp) < oneDay;
}
return false;
});
if (invalidTransition) {
const timeType = days !== undefined ? 'Days' : 'Date';
const filterMsg = this._getRuleFilterDesc(rule);
const compareStorageClass = invalidTransition.StorageClass[0];
const msg = `'${timeType}' in the 'Transition' action for ` +
`StorageClass '${storageClass}' for ${filterMsg} must be at ` +
`least one day apart from ${filterMsg} in the 'Transition' ` +
`action for StorageClass '${compareStorageClass}'`;
return errors.InvalidArgument.customizeDescription(msg);
}
return null;
}
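A worked example of the gap check, reusing the lc instance from the sketch above: a second transition with the same Days value but a different storage class trips it, while a one-day gap passes:
const gapRule = { Transition: [{ Days: ['10'], StorageClass: ['cold'] }] };
lc._checkTimeGap({ days: 10, storageClass: 'archive', rule: gapRule });
// => InvalidArgument ('Days' values must be at least one day apart)
lc._checkTimeGap({ days: 11, storageClass: 'archive', rule: gapRule });
// => null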
/**
* Checks transition time type (i.e. 'Date' or 'Days') only occurs once
* across transitions and across transitions and expiration policies
* @param params - Given function parameters
* @param params.usedTimeType - The time type that has been used by
* another rule
* @param params.currentTimeType - the time type used by the
* current rule
* @param params.rule - The current rule
* @return Returns an error object or `null`
*/
_checkTimeType(params: {
usedTimeType: string | null;
currentTimeType: string;
rule: { Prefix?: string[]; Filter?: any; Expiration?: any[] };
}) {
const { usedTimeType, currentTimeType, rule } = params;
if (usedTimeType && usedTimeType !== currentTimeType) {
const msg = "Found mixed 'Date' and 'Days' based Transition " +
'actions in lifecycle rule for ' +
`${this._getRuleFilterDesc(rule)}`;
return errors.InvalidRequest.customizeDescription(msg);
}
// Transition time type cannot differ from the expiration, if provided.
if (rule.Expiration &&
rule.Expiration[0][currentTimeType] === undefined) {
const msg = "Found mixed 'Date' and 'Days' based Expiration and " +
'Transition actions in lifecycle rule for ' +
`${this._getRuleFilterDesc(rule)}`;
return errors.InvalidRequest.customizeDescription(msg);
}
return null;
}
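And the time-type check in the same style:
lc._checkTimeType({ usedTimeType: 'Days', currentTimeType: 'Date', rule: {} });
// => InvalidRequest (mixed 'Date' and 'Days' based Transition actions)
lc._checkTimeType({
    usedTimeType: null,
    currentTimeType: 'Days',
    rule: { Expiration: [{ Days: ['30'] }] },
}); // => null: Transition and Expiration agree on 'Days'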
/** /**
* Checks the validity of the given date * Checks the validity of the given date
@ -700,159 +533,6 @@ export default class LifecycleConfiguration {
} }
return null; return null;
} }
/**
* Parses the NoncurrentVersionTransition value
* @param rule - Rule object from Rule array from this._parsedXml
* @return - Contains error if parsing failed, otherwise contains
* the parsed nonCurrentVersionTransition array
*
* Format of result:
* result = {
* error: <error>,
* nonCurrentVersionTransition: [
* {
* noncurrentDays: <non-current-days>,
* storageClass: <storage-class>,
* },
* ...
* ]
* }
*/
_parseNoncurrentVersionTransition(rule: {
NoncurrentVersionTransition: any[];
Prefix?: string[];
Filter?: any;
}) {
const nonCurrentVersionTransition: {
noncurrentDays: number;
storageClass: string;
}[] = [];
const usedStorageClasses: string[] = [];
for (let i = 0; i < rule.NoncurrentVersionTransition.length; i++) {
const t = rule.NoncurrentVersionTransition[i]; // Transition object
const noncurrentDays: number | undefined =
t.NoncurrentDays && Number.parseInt(t.NoncurrentDays[0], 10);
const storageClass: string | undefined = t.StorageClass && t.StorageClass[0];
if (noncurrentDays === undefined || storageClass === undefined) {
return { error: errors.MalformedXML };
}
let error = this._checkDays({
days: noncurrentDays,
field: 'NoncurrentDays',
ancestor: 'NoncurrentVersionTransition',
});
if (error) {
return { error };
}
error = this._checkStorageClasses({
storageClass,
usedStorageClasses,
ancestor: 'NoncurrentVersionTransition',
rule,
});
if (error) {
return { error };
}
nonCurrentVersionTransition.push({ noncurrentDays, storageClass });
usedStorageClasses.push(storageClass);
}
return { nonCurrentVersionTransition };
}
/**
* Parses the Transition value
* @param rule - Rule object from Rule array from this._parsedXml
* @return - Contains error if parsing failed, otherwise contains
* the parsed transition array
*
* Format of result:
* result = {
* error: <error>,
* transition: [
* {
* days: <days>,
* date: <date>,
* storageClass: <storage-class>,
* },
* ...
* ]
* }
*/
_parseTransition(rule: {
Transition: any[];
Prefix?: string[];
Filter?: any;
}) {
const transition:
({ days: number; storageClass: string }
| { date: string; storageClass: string })[] = [];
const usedStorageClasses: string[] = [];
let usedTimeType: string | null = null;
for (let i = 0; i < rule.Transition.length; i++) {
const t = rule.Transition[i]; // Transition object
const days = t.Days && Number.parseInt(t.Days[0], 10);
const date = t.Date && t.Date[0];
const storageClass = t.StorageClass && t.StorageClass[0];
if ((days === undefined && date === undefined) ||
(days !== undefined && date !== undefined) ||
(storageClass === undefined)) {
return { error: errors.MalformedXML };
}
let error = this._checkStorageClasses({
storageClass,
usedStorageClasses,
ancestor: 'Transition',
rule,
});
if (error) {
return { error };
}
usedStorageClasses.push(storageClass);
if (days !== undefined) {
error = this._checkTimeType({
usedTimeType,
currentTimeType: 'Days',
rule,
});
if (error) {
return { error };
}
usedTimeType = 'Days';
error = this._checkDays({
days,
field: 'Days',
ancestor: 'Transition',
});
if (error) {
return { error };
}
transition.push({ days, storageClass });
}
if (date !== undefined) {
error = this._checkTimeType({
usedTimeType,
currentTimeType: 'Date',
rule,
});
if (error) {
return { error };
}
usedTimeType = 'Date';
error = this._checkDate(date);
if (error) {
return { error };
}
transition.push({ date, storageClass });
}
error = this._checkTimeGap({ days, date, storageClass, rule });
if (error) {
return { error };
}
}
return { transition };
}
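A minimal parse of the rule shape consumed above, with the same lc instance (the StorageClass must match a configured replication endpoint site):
const tRule = {
    Transition: [{ Days: ['0'], StorageClass: ['us-east-2'] }],
};
lc._parseTransition(tRule);
// => { transition: [{ days: 0, storageClass: 'us-east-2' }] }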
/** /**
* Check that action component of rule is valid * Check that action component of rule is valid
* @param rule - a rule object from Rule array from this._parsedXml * @param rule - a rule object from Rule array from this._parsedXml
@ -889,13 +569,8 @@ export default class LifecycleConfiguration {
propName: 'actions', propName: 'actions',
actions: [], actions: [],
}; };
const validActions = [ const validActions = ['AbortIncompleteMultipartUpload',
'AbortIncompleteMultipartUpload', 'Expiration', 'NoncurrentVersionExpiration'];
'Expiration',
'NoncurrentVersionExpiration',
'NoncurrentVersionTransition',
'Transition',
];
validActions.forEach(a => { validActions.forEach(a => {
if (rule[a]) { if (rule[a]) {
actionsObj.actions.push({ actionName: `${a}` }); actionsObj.actions.push({ actionName: `${a}` });
@ -912,14 +587,7 @@ export default class LifecycleConfiguration {
if (action.error) { if (action.error) {
actionsObj.error = action.error; actionsObj.error = action.error;
} else { } else {
const actionTimes = [ const actionTimes = ['days', 'date', 'deleteMarker', 'newerNoncurrentVersions'];
'days',
'date',
'deleteMarker',
'transition',
'nonCurrentVersionTransition',
'newerNoncurrentVersions'
];
actionTimes.forEach(t => { actionTimes.forEach(t => {
if (action[t]) { if (action[t]) {
// eslint-disable-next-line no-param-reassign // eslint-disable-next-line no-param-reassign
@ -1153,26 +821,6 @@ export default class LifecycleConfiguration {
if (a.deleteMarker) { if (a.deleteMarker) {
assert.strictEqual(typeof a.deleteMarker, 'string'); assert.strictEqual(typeof a.deleteMarker, 'string');
} }
if (a.nonCurrentVersionTransition) {
assert.strictEqual(
typeof a.nonCurrentVersionTransition, 'object');
a.nonCurrentVersionTransition.forEach(t => {
assert.strictEqual(typeof t.noncurrentDays, 'number');
assert.strictEqual(typeof t.storageClass, 'string');
});
}
if (a.transition) {
assert.strictEqual(typeof a.transition, 'object');
a.transition.forEach(t => {
if (t.days || t.days === 0) {
assert.strictEqual(typeof t.days, 'number');
}
if (t.date !== undefined) {
assert.strictEqual(typeof t.date, 'string');
}
assert.strictEqual(typeof t.storageClass, 'string');
});
}
if (a.newerNoncurrentVersions) { if (a.newerNoncurrentVersions) {
assert.strictEqual(typeof a.newerNoncurrentVersions, 'number'); assert.strictEqual(typeof a.newerNoncurrentVersions, 'number');
@ -1226,15 +874,7 @@ export default class LifecycleConfiguration {
} }
const Actions = actions.map(action => { const Actions = actions.map(action => {
const { const { actionName, days, date, deleteMarker, newerNoncurrentVersions } = action;
actionName,
days,
date,
deleteMarker,
nonCurrentVersionTransition,
transition,
newerNoncurrentVersions,
} = action;
let Action: any; let Action: any;
if (actionName === 'AbortIncompleteMultipartUpload') { if (actionName === 'AbortIncompleteMultipartUpload') {
Action = `<${actionName}><DaysAfterInitiation>${days}` + Action = `<${actionName}><DaysAfterInitiation>${days}` +
@ -1253,40 +893,6 @@ export default class LifecycleConfiguration {
Action = `<${actionName}>${Days}${Date}${DelMarker}` + Action = `<${actionName}>${Days}${Date}${DelMarker}` +
`</${actionName}>`; `</${actionName}>`;
} }
if (actionName === 'NoncurrentVersionTransition') {
const xml: string[] = [];
nonCurrentVersionTransition!.forEach(transition => {
const { noncurrentDays, storageClass } = transition;
xml.push(
`<${actionName}>`,
`<NoncurrentDays>${noncurrentDays}` +
'</NoncurrentDays>',
`<StorageClass>${storageClass}</StorageClass>`,
`</${actionName}>`,
);
});
Action = xml.join('');
}
if (actionName === 'Transition') {
const xml: string[] = [];
transition!.forEach(transition => {
const { days, date, storageClass } = transition;
let element: string = '';
if (days !== undefined) {
element = `<Days>${days}</Days>`;
}
if (date !== undefined) {
element = `<Date>${date}</Date>`;
}
xml.push(
`<${actionName}>`,
element,
`<StorageClass>${storageClass}</StorageClass>`,
`</${actionName}>`,
);
});
Action = xml.join('');
}
return Action; return Action;
}).join(''); }).join('');
return `<Rule>${ID}${Status}${Filter}${Actions}</Rule>`; return `<Rule>${ID}${Status}${Filter}${Actions}</Rule>`;
@ -1369,15 +975,6 @@ export type Rule = {
date?: number; date?: number;
deleteMarker?: boolean; deleteMarker?: boolean;
newerNoncurrentVersions?: number; newerNoncurrentVersions?: number;
nonCurrentVersionTransition?: {
noncurrentDays: number;
storageClass: string;
}[];
transition?: {
days?: number;
date?: string;
storageClass: string;
}[];
}[]; }[];
filter?: { filter?: {
rulePrefix?: string; rulePrefix?: string;

View File

@ -28,7 +28,6 @@ export default class LifecycleRule {
ncvExpiration?: NoncurrentExpiration; ncvExpiration?: NoncurrentExpiration;
abortMPU?: { DaysAfterInitiation: number }; abortMPU?: { DaysAfterInitiation: number };
transitions?: any[]; transitions?: any[];
ncvTransitions?: any[];
prefix?: string; prefix?: string;
constructor(id: string, status: Status) { constructor(id: string, status: Status) {
@ -46,7 +45,6 @@ export default class LifecycleRule {
NoncurrentVersionExpiration?: NoncurrentExpiration; NoncurrentVersionExpiration?: NoncurrentExpiration;
AbortIncompleteMultipartUpload?: { DaysAfterInitiation: number }; AbortIncompleteMultipartUpload?: { DaysAfterInitiation: number };
Transitions?: any[]; Transitions?: any[];
NoncurrentVersionTransitions?: any[];
Filter?: Filter; Filter?: Filter;
Prefix?: ''; Prefix?: '';
} = { ID: this.id, Status: this.status }; } = { ID: this.id, Status: this.status };
@ -63,9 +61,6 @@ export default class LifecycleRule {
if (this.transitions) { if (this.transitions) {
rule.Transitions = this.transitions; rule.Transitions = this.transitions;
} }
if (this.ncvTransitions) {
rule.NoncurrentVersionTransitions = this.ncvTransitions;
}
const filter = this.buildFilter(); const filter = this.buildFilter();
@ -178,13 +173,4 @@ export default class LifecycleRule {
this.transitions = transitions; this.transitions = transitions;
return this; return this;
} }
/**
* NonCurrentVersionTransitions
* @param nvcTransitions - NonCurrentVersionTransitions
*/
addNCVTransitions(nvcTransitions) {
this.ncvTransitions = nvcTransitions;
return this;
}
} }
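A hypothetical builder usage; the transitions setter name (addTransitions) is assumed from the field it sets in the hunk above:
const enabledRule = new LifecycleRule('expire-logs', 'Enabled')
    .addTransitions([{ Days: 30, StorageClass: 'cold' }])
    .build();
// => { ID: 'expire-logs', Status: 'Enabled', Transitions: [...], ... }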

View File

@ -1,4 +1,3 @@
import * as crypto from 'crypto';
import * as constants from '../constants'; import * as constants from '../constants';
import * as VersionIDUtils from '../versioning/VersionID'; import * as VersionIDUtils from '../versioning/VersionID';
import { VersioningConstants } from '../versioning/constants'; import { VersioningConstants } from '../versioning/constants';
@ -6,8 +5,6 @@ import ObjectMDLocation, {
ObjectMDLocationData, ObjectMDLocationData,
Location, Location,
} from './ObjectMDLocation'; } from './ObjectMDLocation';
import ObjectMDAmzRestore from './ObjectMDAmzRestore';
import ObjectMDArchive from './ObjectMDArchive';
export type ACL = { export type ACL = {
Canned: string; Canned: string;
@ -32,7 +29,6 @@ export type ReplicationInfo = {
role: string; role: string;
storageType: string; storageType: string;
dataStoreVersionId: string; dataStoreVersionId: string;
isNFS: boolean | null;
}; };
export type ObjectMDData = { export type ObjectMDData = {
@ -40,26 +36,24 @@ export type ObjectMDData = {
'owner-id': string; 'owner-id': string;
'cache-control': string; 'cache-control': string;
'content-disposition': string; 'content-disposition': string;
'content-language': string;
'content-encoding': string; 'content-encoding': string;
'creation-time'?: string;
'last-modified'?: string; 'last-modified'?: string;
expires: string; expires: string;
'content-length': number; 'content-length': number;
'content-type': string; 'content-type': string;
'content-md5': string; 'content-md5': string;
// simple/no version. will expand once object versioning is
// introduced
'x-amz-version-id': 'null' | string; 'x-amz-version-id': 'null' | string;
'x-amz-server-version-id': string; 'x-amz-server-version-id': string;
'x-amz-restore'?: ObjectMDAmzRestore; // TODO: Handle this as a utility function for all object puts
archive?: ObjectMDArchive; // similar to normalizing request but after checkAuth so
// string to sign is not impacted. This is GH Issue#89.
'x-amz-storage-class': string; 'x-amz-storage-class': string;
'x-amz-server-side-encryption': string; 'x-amz-server-side-encryption': string;
'x-amz-server-side-encryption-aws-kms-key-id': string; 'x-amz-server-side-encryption-aws-kms-key-id': string;
'x-amz-server-side-encryption-customer-algorithm': string; 'x-amz-server-side-encryption-customer-algorithm': string;
'x-amz-website-redirect-location': string; 'x-amz-website-redirect-location': string;
'x-amz-scal-transition-in-progress'?: boolean;
'x-amz-scal-transition-time'?: string;
azureInfo?: any;
acl: ACL; acl: ACL;
key: string; key: string;
location: null | Location[]; location: null | Location[];
@ -79,17 +73,6 @@ export type ObjectMDData = {
replicationInfo: ReplicationInfo; replicationInfo: ReplicationInfo;
dataStoreName: string; dataStoreName: string;
originOp: string; originOp: string;
microVersionId?: string;
// Deletion flag
// Used for keeping object metadata in the oplog event
// In case of a deletion the flag is first updated before
// deleting the object
deleted: boolean;
// PHD flag indicates whether the object is a temporary placeholder.
// This is the case when the latest version of an object gets deleted
// the master is set as a placeholder and gets updated with the new latest
// version data after a certain amount of time.
isPHD: boolean;
}; };
/** /**
@ -118,17 +101,9 @@ export default class ObjectMD {
} else { } else {
this._updateFromParsedJSON(objMd); this._updateFromParsedJSON(objMd);
} }
if (!this._data['creation-time']) {
const lastModified = this.getLastModified();
if (lastModified) {
this.setCreationTime(lastModified);
}
}
} else { } else {
// set newly-created object md modified time to current time // set newly-created object md modified time to current time
const dt = new Date().toJSON(); this._data['last-modified'] = new Date().toJSON();
this.setLastModified(dt);
this.setCreationTime(dt);
} }
// set latest md model version now that we ensured // set latest md model version now that we ensured
// backward-compat conversion // backward-compat conversion
@ -183,8 +158,6 @@ export default class ObjectMD {
'content-length': 0, 'content-length': 0,
'content-type': '', 'content-type': '',
'content-md5': '', 'content-md5': '',
'content-language': '',
'creation-time': undefined,
// simple/no version. will expand once object versioning is // simple/no version. will expand once object versioning is
// introduced // introduced
'x-amz-version-id': 'null', 'x-amz-version-id': 'null',
@ -197,7 +170,6 @@ export default class ObjectMD {
'x-amz-server-side-encryption-aws-kms-key-id': '', 'x-amz-server-side-encryption-aws-kms-key-id': '',
'x-amz-server-side-encryption-customer-algorithm': '', 'x-amz-server-side-encryption-customer-algorithm': '',
'x-amz-website-redirect-location': '', 'x-amz-website-redirect-location': '',
'x-amz-scal-transition-in-progress': false,
acl: { acl: {
Canned: 'private', Canned: 'private',
FULL_CONTROL: [], FULL_CONTROL: [],
@ -207,7 +179,6 @@ export default class ObjectMD {
}, },
key: '', key: '',
location: null, location: null,
azureInfo: undefined,
// versionId, isNull, nullVersionId and isDeleteMarker // versionId, isNull, nullVersionId and isDeleteMarker
// should be undefined when not set explicitly // should be undefined when not set explicitly
isNull: undefined, isNull: undefined,
@ -227,12 +198,9 @@ export default class ObjectMD {
role: '', role: '',
storageType: '', storageType: '',
dataStoreVersionId: '', dataStoreVersionId: '',
isNFS: null,
}, },
dataStoreName: '', dataStoreName: '',
originOp: '', originOp: '',
deleted: false,
isPHD: false,
}; };
} }
@ -462,50 +430,6 @@ export default class ObjectMD {
return this._data['content-md5']; return this._data['content-md5'];
} }
/**
* Set content-language
*
* @param contentLanguage - content-language
* @return itself
*/
setContentLanguage(contentLanguage: string) {
this._data['content-language'] = contentLanguage;
return this;
}
/**
* Returns content-language
*
* @return content-language
*/
getContentLanguage() {
return this._data['content-language'];
}
/**
* Set Creation Date
*
* @param creationTime - Creation Date
* @return itself
*/
setCreationTime(creationTime: string) {
this._data['creation-time'] = creationTime;
return this;
}
/**
* Returns Creation Date
*
* @return Creation Date
*/
getCreationTime() {
// If creation-time is not set fallback to LastModified
if (!this._data['creation-time']) {
return this.getLastModified();
}
return this._data['creation-time'];
}
/** /**
* Set version id * Set version id
* *
@ -646,48 +570,6 @@ export default class ObjectMD {
return this._data['x-amz-website-redirect-location']; return this._data['x-amz-website-redirect-location'];
} }
/**
* Set metadata transition in progress value
*
* @param inProgress - True if transition is in progress, false otherwise
* @param transitionTime - Date when the transition started
* @return itself
*/
setTransitionInProgress(inProgress: false): this
setTransitionInProgress(inProgress: true, transitionTime: Date|string|number): this
setTransitionInProgress(inProgress: boolean, transitionTime?: Date|string|number) {
this._data['x-amz-scal-transition-in-progress'] = inProgress;
if (!inProgress || !transitionTime) {
delete this._data['x-amz-scal-transition-time'];
} else {
if (typeof transitionTime === 'number') {
transitionTime = new Date(transitionTime);
}
if (transitionTime instanceof Date) {
transitionTime = transitionTime.toISOString();
}
this._data['x-amz-scal-transition-time'] = transitionTime;
}
return this;
}
/**
* Get metadata transition in progress value
*
* @return True if transition is in progress, false otherwise
*/
getTransitionInProgress() {
return this._data['x-amz-scal-transition-in-progress'];
}
/**
* Gets the transition time of the object.
* @returns The transition time of the object.
*/
getTransitionTime() {
return this._data['x-amz-scal-transition-time'];
}
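A sketch of the overload pair above: enabling records a normalized ISO timestamp, disabling clears it (the md instance is assumed to exist):
declare const md: ObjectMD;
md.setTransitionInProgress(true, Date.now()); // number -> Date -> ISO string
md.getTransitionInProgress();                 // true
md.getTransitionTime();                       // e.g. '2024-07-01T12:00:00.000Z'
md.setTransitionInProgress(false);            // drops x-amz-scal-transition-time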
/** /**
* Set access control list * Set access control list
* *
@ -793,29 +675,6 @@ export default class ObjectMD {
return reducedLocations; return reducedLocations;
} }
/**
* Set the Azure specific information
* @param azureInfo - a plain JS structure representing the
* Azure specific information for a Blob or a Container (see constructor
* of {@link ObjectMDAzureInfo} for a description of the fields of this
* structure)
* @return itself
*/
setAzureInfo(azureInfo: any) {
this._data.azureInfo = azureInfo;
return this;
}
/**
* Get the Azure specific information
* @return a plain JS structure representing the Azure specific
* information for a Blob or a Container and suitable for the constructor
* of {@link ObjectMDAzureInfo}.
*/
getAzureInfo() {
return this._data.azureInfo;
}
/** /**
* Set metadata isNull value * Set metadata isNull value
* *
@ -922,19 +781,6 @@ export default class ObjectMD {
return this._data.isDeleteMarker || false; return this._data.isDeleteMarker || false;
} }
/**
* Get if the object is a multipart upload (MPU)
*
* The function checks the "content-md5" field: if it contains a
* dash ('-') it is an MPU, as the content-md5 string ends with
* "-[nbparts]" for MPUs.
*
* @return Whether object is a multipart upload
*/
isMultipartUpload() {
return this.getContentMd5().includes('-');
}
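The content-md5 heuristic in isolation: MPU ETags end in '-<nbparts>', while single-part MD5 digests never contain a dash:
console.log('9b2cf535f27731c974343645a3985328'.includes('-'));    // false: single PUT
console.log('d41d8cd98f00b204e9800998ecf8427e-38'.includes('-')); // true: 38-part MPU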
/** /**
* Set metadata versionId value * Set metadata versionId value
* *
@ -1014,20 +860,6 @@ export default class ObjectMD {
return this._data.tags; return this._data.tags;
} }
getUserMetadata() {
const metaHeaders = {};
const data = this.getValue();
Object.keys(data).forEach(key => {
if (key.startsWith('x-amz-meta-')) {
metaHeaders[key] = data[key];
}
});
if (Object.keys(metaHeaders).length > 0) {
return JSON.stringify(metaHeaders);
}
return undefined;
}
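A sketch of the filtering above, assuming the x-amz-meta- setter visible in a later hunk is named setUserMetadata:
declare const objMd: ObjectMD;
objMd.setUserMetadata({
    'x-amz-meta-color': 'blue',
    'content-type': 'text/plain', // ignored: wrong prefix
});
objMd.getUserMetadata(); // '{"x-amz-meta-color":"blue"}'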
/** /**
* Set replication information * Set replication information
* *
@ -1043,7 +875,6 @@ export default class ObjectMD {
role: string; role: string;
storageType?: string; storageType?: string;
dataStoreVersionId?: string; dataStoreVersionId?: string;
isNFS?: boolean;
}) { }) {
const { const {
status, status,
@ -1054,7 +885,6 @@ export default class ObjectMD {
role, role,
storageType, storageType,
dataStoreVersionId, dataStoreVersionId,
isNFS,
} = replicationInfo; } = replicationInfo;
this._data.replicationInfo = { this._data.replicationInfo = {
status, status,
@ -1065,7 +895,6 @@ export default class ObjectMD {
role, role,
storageType: storageType || '', storageType: storageType || '',
dataStoreVersionId: dataStoreVersionId || '', dataStoreVersionId: dataStoreVersionId || '',
isNFS: isNFS || null,
}; };
return this; return this;
} }
@ -1084,24 +913,6 @@ export default class ObjectMD {
return this; return this;
} }
/**
* Set whether the replication is occurring from an NFS bucket.
* @param isNFS - Whether replication from an NFS bucket
* @return itself
*/
setReplicationIsNFS(isNFS: boolean) {
this._data.replicationInfo.isNFS = isNFS;
return this;
}
/**
* Get whether the replication is occurring from an NFS bucket.
* @return Whether replication from an NFS bucket
*/
getReplicationIsNFS() {
return this._data.replicationInfo.isNFS;
}
setReplicationSiteStatus(site: string, status: string) { setReplicationSiteStatus(site: string, status: string) {
const backend = this._data.replicationInfo.backends.find( const backend = this._data.replicationInfo.backends.find(
(o) => o.site === site (o) => o.site === site
@ -1152,11 +963,6 @@ export default class ObjectMD {
return this; return this;
} }
setReplicationStorageType(storageType: string) {
this._data.replicationInfo.storageType = storageType;
return this;
}
setReplicationStorageClass(storageClass: string) { setReplicationStorageClass(storageClass: string) {
this._data.replicationInfo.storageClass = storageClass; this._data.replicationInfo.storageClass = storageClass;
return this; return this;
@ -1238,9 +1044,6 @@ export default class ObjectMD {
Object.keys(metaHeaders).forEach((key) => {
if (key.startsWith('x-amz-meta-')) {
this._data[key] = metaHeaders[key];
} else if (key.startsWith('x-ms-meta-')) {
const _key = key.replace('x-ms-meta-', 'x-amz-meta-');
this._data[_key] = metaHeaders[key];
}
});
// If a multipart object and the acl is already parsed, we update it
@ -1250,20 +1053,6 @@ export default class ObjectMD {
return this;
}
/**
* Clear all existing meta headers (used for Azure)
*
* @return itself
*/
clearMetadataValues() {
Object.keys(this._data).forEach(key => {
if (key.startsWith('x-amz-meta')) {
delete this._data[key];
}
});
return this;
}
/**
* overrideMetadataValues (used for complete MPU and object copy)
*
@ -1275,38 +1064,6 @@ export default class ObjectMD {
return this;
}
/**
* Create or update the microVersionId field
*
* This field can be used to force an update in MongoDB. This can
* be needed in the following cases:
*
* - in case no other metadata field changes
*
* - to detect a change when fields change but object version does
* not change e.g. when ingesting a putObjectTagging coming from
* S3C to Zenko
*
* - to manage conflicts during concurrent updates, using
* conditions on the microVersionId field.
*
* It's a field of 16 hexadecimal characters randomly generated
*
* @return itself
*/
updateMicroVersionId() {
this._data.microVersionId = crypto.randomBytes(8).toString('hex');
}
/**
* Get the microVersionId field, or null if not set
*
* @return the microVersionId field if exists, or {null} if it does not exist
*/
getMicroVersionId() {
return this._data.microVersionId || null;
}
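Note: a minimal sketch of the conflict-management use case described above; the MongoDB collection and objectKey names are hypothetical, not Arsenal API:

const md = new ObjectMD();
const expected = md.getMicroVersionId(); // null when never set
md.updateMicroVersionId(); // now 16 random hex characters
// Conditional update guarded by the previous microVersionId:
await collection.updateOne(
    { _id: objectKey, 'value.microVersionId': expected },
    { $set: { value: md.getValue() } },
);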
/**
* Set object legal hold status
* @param legalHold - true if legal hold is 'ON' false if 'OFF'
@ -1387,98 +1144,4 @@ export default class ObjectMD {
getValue() {
return this._data;
}
/**
* Get x-amz-restore
*
* @returns x-amz-restore
*/
getAmzRestore() {
return this._data['x-amz-restore'];
}
/**
* Set x-amz-restore
*
* @param value x-amz-restore object
* @returns itself
* @throws case of invalid parameter
*/
setAmzRestore(value?: ObjectMDAmzRestore) {
if (value) {
// Accept object instance of ObjectMDAmzRestore and Object
if (!(value instanceof ObjectMDAmzRestore) && !ObjectMDAmzRestore.isValid(value)) {
throw new Error('x-amz-restore must be type of ObjectMDAmzRestore.');
}
this._data['x-amz-restore'] = value;
} else {
delete this._data['x-amz-restore'];
}
return this;
}
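Note: a usage sketch for this accessor pair, using the ObjectMDAmzRestore model removed further down in this changeset (dates are illustrative):

const md = new ObjectMD()
    .setAmzRestore(new ObjectMDAmzRestore(true)); // restore in progress
md.setAmzRestore(new ObjectMDAmzRestore(false, '2024-07-11T00:00:00Z')); // completed
md.getAmzRestore(); // the stored x-amz-restore structure
md.setAmzRestore(undefined); // clears the field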
/**
* Get archive
*
* @returns archive
*/
getArchive() {
return this._data.archive;
}
/**
* Set archive
*
* @param value archive object
* @returns itself
* @throws case of invalid parameter
*/
setArchive(value: ObjectMDArchive) {
if (value) {
// Accept object instance of ObjectMDArchive and Object
if (!(value instanceof ObjectMDArchive) && !ObjectMDArchive.isValid(value)) {
throw new Error('archive must be type of ObjectMDArchive.');
}
this._data.archive = value;
} else {
delete this._data.archive;
}
return this;
}
/**
* Set deleted flag
* @param {Boolean} value deleted object
* @return {ObjectMD}
*/
setDeleted(value) {
this._data.deleted = value;
return this;
}
/**
* Get deleted flag
* @return {Boolean}
*/
getDeleted() {
return this._data.deleted;
}
/**
* Set isPHD flag
* @param {Boolean} value isPHD value
* @return {ObjectMD}
*/
setIsPHD(value) {
this._data.isPHD = value;
return this;
}
/**
* Get isPHD flag
* @return {Boolean}
*/
getIsPHD() {
return this._data.isPHD;
}
}


@ -1,94 +0,0 @@
/*
* Code based on Yutaka Oishi (Fujifilm) contributions
* Date: 11 Sep 2020
*/
/**
* class representing the x-amz-restore of object metadata.
*
* @class
*/
export default class ObjectMDAmzRestore {
'expiry-date': Date | string;
'ongoing-request': boolean;
/**
*
* @constructor
* @param ongoingRequest ongoing-request
* @param [expiryDate] expiry-date
* @throws case of invalid parameter
*/
constructor(ongoingRequest: boolean, expiryDate?: Date | string) {
this.setOngoingRequest(ongoingRequest);
this.setExpiryDate(expiryDate);
}
/**
*
* @param data archiveInfo
* @returns true if the provided object is valid
*/
static isValid(data: { 'ongoing-request': boolean; 'expiry-date': Date | string }) {
try {
// eslint-disable-next-line no-new
new ObjectMDAmzRestore(data['ongoing-request'], data['expiry-date']);
return true;
} catch (err) {
return false;
}
}
/**
*
* @returns ongoing-request
*/
getOngoingRequest() {
return this['ongoing-request'];
}
/**
*
* @param value ongoing-request
* @throws case of invalid parameter
*/
setOngoingRequest(value?: boolean) {
if (value === undefined) {
throw new Error('ongoing-request is required.');
} else if (typeof value !== 'boolean') {
throw new Error('ongoing-request must be type of boolean.');
}
this['ongoing-request'] = value;
}
/**
*
* @returns expiry-date
*/
getExpiryDate() {
return this['expiry-date'];
}
/**
*
* @param value expiry-date
* @throws case of invalid parameter
*/
setExpiryDate(value?: Date | string) {
if (value) {
const checkWith = (new Date(value)).getTime();
if (Number.isNaN(Number(checkWith))) {
throw new Error('expiry-date must be a valid Date.');
}
this['expiry-date'] = value;
}
}
/**
*
* @returns itself
*/
getValue() {
return this;
}
}


@ -1,184 +0,0 @@
/**
* class representing the archive of object metadata.
*
* @class
*/
export default class ObjectMDArchive {
archiveInfo: any;
// @ts-ignore
restoreRequestedAt: Date | string;
// @ts-ignore
restoreRequestedDays: number;
// @ts-ignore
restoreCompletedAt: Date | string;
// @ts-ignore
restoreWillExpireAt: Date | string;
/**
*
* @constructor
* @param archiveInfo contains the archive info set by the TLP and returned by the TLP jobs
* @param [restoreRequestedAt] set at the time restore request is made by the client
* @param [restoreRequestedDays] set at the time restore request is made by the client
* @param [restoreCompletedAt] set at the time of successful restore
* @param [restoreWillExpireAt] computed and stored at the time of restore
* @throws case of invalid parameter
*/
constructor(
archiveInfo: any,
restoreRequestedAt?: Date | string,
restoreRequestedDays?: number,
restoreCompletedAt?: Date | string,
restoreWillExpireAt?: Date | string,
) {
this.setArchiveInfo(archiveInfo);
this.setRestoreRequestedAt(restoreRequestedAt!);
this.setRestoreRequestedDays(restoreRequestedDays!);
this.setRestoreCompletedAt(restoreCompletedAt!);
this.setRestoreWillExpireAt(restoreWillExpireAt!);
}
/**
*
* @param data archiveInfo
* @returns true if the provided object is valid
*/
static isValid(data: {
archiveInfo: any;
restoreRequestedAt?: Date;
restoreRequestedDays?: number;
restoreCompletedAt?: Date;
restoreWillExpireAt?: Date;
}) {
try {
// eslint-disable-next-line no-new
new ObjectMDArchive(
data.archiveInfo,
data.restoreRequestedAt,
data.restoreRequestedDays,
data.restoreCompletedAt,
data.restoreWillExpireAt,
);
return true;
} catch (err) {
return false;
}
}
/**
*
* @returns archiveInfo
*/
getArchiveInfo() {
return this.archiveInfo;
}
/**
* @param value archiveInfo
* @throws case of invalid parameter
*/
setArchiveInfo(value: any) {
if (!value) {
throw new Error('archiveInfo is required.');
} else if (typeof value !== 'object') {
throw new Error('archiveInfo must be type of object.');
}
this.archiveInfo = value;
}
/**
*
* @returns restoreRequestedAt
*/
getRestoreRequestedAt() {
return this.restoreRequestedAt;
}
/**
* @param value restoreRequestedAt
* @throws case of invalid parameter
*/
setRestoreRequestedAt(value: Date | string) {
if (value) {
const checkWith = (new Date(value)).getTime();
if (Number.isNaN(Number(checkWith))) {
throw new Error('restoreRequestedAt must be a valid Date.');
}
this.restoreRequestedAt = value;
}
}
/**
*
* @returns restoreRequestedDays
*/
getRestoreRequestedDays() {
return this.restoreRequestedDays;
}
/**
* @param value restoreRequestedDays
* @throws case of invalid parameter
*/
setRestoreRequestedDays(value: number) {
if (value) {
if (isNaN(value)) {
throw new Error('restoreRequestedDays must be type of Number.');
}
this.restoreRequestedDays = value;
}
}
/**
*
* @returns restoreCompletedAt
*/
getRestoreCompletedAt() {
return this.restoreCompletedAt;
}
/**
* @param value restoreCompletedAt
* @throws case of invalid parameter
*/
setRestoreCompletedAt(value: Date | string) {
if (value) {
if (!this.restoreRequestedAt || !this.restoreRequestedDays) {
throw new Error('restoreCompletedAt must be set after restoreRequestedAt and restoreRequestedDays.');
}
const checkWith = (new Date(value)).getTime();
if (Number.isNaN(Number(checkWith))) {
throw new Error('restoreCompletedAt must be a valid Date.');
}
this.restoreCompletedAt = value;
}
}
/**
*
* @returns restoreWillExpireAt
*/
getRestoreWillExpireAt() {
return this.restoreWillExpireAt;
}
/**
* @param value restoreWillExpireAt
* @throws case of invalid parameter
*/
setRestoreWillExpireAt(value: Date | string) {
if (value) {
if (!this.restoreRequestedAt || !this.restoreRequestedDays) {
throw new Error('restoreWillExpireAt must be set after restoreRequestedAt and restoreRequestedDays.');
}
const checkWith = (new Date(value)).getTime();
if (Number.isNaN(Number(checkWith))) {
throw new Error('restoreWillExpireAt must be a valid Date.');
}
this.restoreWillExpireAt = value;
}
}
/**
*
* @returns itself
*/
getValue() {
return this;
}
}
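Note: a sketch of the ordering constraint the setters above enforce (all values are illustrative):

const archive = new ObjectMDArchive(
    { archiveId: '123456' }, // archiveInfo, backend-specific shape
    '2024-06-01T00:00:00.000Z', // restoreRequestedAt
    2, // restoreRequestedDays
    '2024-06-01T02:00:00.000Z', // restoreCompletedAt
    '2024-06-03T02:00:00.000Z', // restoreWillExpireAt
);
ObjectMDArchive.isValid(archive.getValue()); // true
// Passing restoreCompletedAt or restoreWillExpireAt without the two
// "requested" fields throws, which the constructor's setter order enforces.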


@ -1,188 +0,0 @@
/**
* Helper class to ease access to the Azure specific information for
* Blob and Container objects.
*/
export default class ObjectMDAzureInfo {
_data: {
containerPublicAccess: string;
containerStoredAccessPolicies: any[];
containerImmutabilityPolicy: any;
containerLegalHoldStatus: boolean;
containerDeletionInProgress: boolean;
blobType: string;
blobContentMD5: string;
blobIssuedETag: string;
blobCopyInfo: any;
blobSequenceNumber: number;
blobAccessTierChangeTime: Date;
blobUncommitted: boolean;
};
/**
* @constructor
* @param obj - Raw structure for the Azure info on Blob/Container
* @param obj.containerPublicAccess - Public access authorization
* type
* @param obj.containerStoredAccessPolicies - Access policies
* for Shared Access Signature bearer
* @param obj.containerImmutabilityPolicy - data immutability
* policy for this container
* @param obj.containerLegalHoldStatus - legal hold status for
* this container
* @param obj.containerDeletionInProgress - deletion in progress
* indicator for this container
* @param obj.blobType - defines the type of blob for this object
* @param obj.blobContentMD5 - whole object MD5 sum set by the
* client through the Azure API
* @param obj.blobIssuedETag - backup of the issued ETag on MD only
* operations like Set Blob Properties and Set Blob Metadata
* @param obj.blobCopyInfo - information pertaining to past and
* pending copy operation targeting this object
* @param obj.blobSequenceNumber - sequence number for a PageBlob
* @param obj.blobAccessTierChangeTime - date of change of tier
* @param obj.blobUncommitted - A block has been put for a
* nonexistent blob which is about to be created
*/
constructor(obj: {
containerPublicAccess: string;
containerStoredAccessPolicies: any[];
containerImmutabilityPolicy: any;
containerLegalHoldStatus: boolean;
containerDeletionInProgress: boolean;
blobType: string;
blobContentMD5: string;
blobIssuedETag: string;
blobCopyInfo: any;
blobSequenceNumber: number;
blobAccessTierChangeTime: Date;
blobUncommitted: boolean;
}) {
this._data = {
containerPublicAccess: obj.containerPublicAccess,
containerStoredAccessPolicies: obj.containerStoredAccessPolicies,
containerImmutabilityPolicy: obj.containerImmutabilityPolicy,
containerLegalHoldStatus: obj.containerLegalHoldStatus,
containerDeletionInProgress: obj.containerDeletionInProgress,
blobType: obj.blobType,
blobContentMD5: obj.blobContentMD5,
blobIssuedETag: obj.blobIssuedETag,
blobCopyInfo: obj.blobCopyInfo,
blobSequenceNumber: obj.blobSequenceNumber,
blobAccessTierChangeTime: obj.blobAccessTierChangeTime,
blobUncommitted: obj.blobUncommitted,
};
}
getContainerPublicAccess() {
return this._data.containerPublicAccess;
}
setContainerPublicAccess(containerPublicAccess: string) {
this._data.containerPublicAccess = containerPublicAccess;
return this;
}
getContainerStoredAccessPolicies() {
return this._data.containerStoredAccessPolicies;
}
setContainerStoredAccessPolicies(containerStoredAccessPolicies: any[]) {
this._data.containerStoredAccessPolicies =
containerStoredAccessPolicies;
return this;
}
getContainerImmutabilityPolicy() {
return this._data.containerImmutabilityPolicy;
}
setContainerImmutabilityPolicy(containerImmutabilityPolicy: any) {
this._data.containerImmutabilityPolicy = containerImmutabilityPolicy;
return this;
}
getContainerLegalHoldStatus() {
return this._data.containerLegalHoldStatus;
}
setContainerLegalHoldStatus(containerLegalHoldStatus: boolean) {
this._data.containerLegalHoldStatus = containerLegalHoldStatus;
return this;
}
getContainerDeletionInProgress() {
return this._data.containerDeletionInProgress;
}
setContainerDeletionInProgress(containerDeletionInProgress: boolean) {
this._data.containerDeletionInProgress = containerDeletionInProgress;
return this;
}
getBlobType() {
return this._data.blobType;
}
setBlobType(blobType: string) {
this._data.blobType = blobType;
return this;
}
getBlobContentMD5() {
return this._data.blobContentMD5;
}
setBlobContentMD5(blobContentMD5: string) {
this._data.blobContentMD5 = blobContentMD5;
return this;
}
getBlobIssuedETag() {
return this._data.blobIssuedETag;
}
setBlobIssuedETag(blobIssuedETag: string) {
this._data.blobIssuedETag = blobIssuedETag;
return this;
}
getBlobCopyInfo() {
return this._data.blobCopyInfo;
}
setBlobCopyInfo(blobCopyInfo: any) {
this._data.blobCopyInfo = blobCopyInfo;
return this;
}
getBlobSequenceNumber() {
return this._data.blobSequenceNumber;
}
setBlobSequenceNumber(blobSequenceNumber: number) {
this._data.blobSequenceNumber = blobSequenceNumber;
return this;
}
getBlobAccessTierChangeTime() {
return this._data.blobAccessTierChangeTime;
}
setBlobAccessTierChangeTime(blobAccessTierChangeTime: Date) {
this._data.blobAccessTierChangeTime = blobAccessTierChangeTime;
return this;
}
getBlobUncommitted() {
return this._data.blobUncommitted;
}
setBlobUncommitted(blobUncommitted: boolean) {
this._data.blobUncommitted = blobUncommitted;
return this;
}
getValue() {
return this._data;
}
}


@ -5,7 +5,6 @@ export type Location = BaseLocation & {
size: number;
dataStoreETag: string;
dataStoreVersionId: string;
blockId?: string;
};
export type ObjectMDLocationData = {
key: string;
@ -13,8 +12,6 @@ export type ObjectMDLocationData = {
size: number;
dataStoreName: string;
dataStoreETag: string;
dataStoreVersionId: string;
blockId?: string;
cryptoScheme?: number;
cipheredDataKey?: string;
};
@ -34,14 +31,10 @@ export default class ObjectMDLocation {
* @param locationObj.dataStoreName - type of data store
* @param locationObj.dataStoreETag - internal ETag of
* data part
* @param [locationObj.dataStoreVersionId] - versionId,
* needed for cloud backends
* @param [location.cryptoScheme] - if location data is
* encrypted: the encryption scheme version
* @param [location.cipheredDataKey] - if location data
* is encrypted: the base64-encoded ciphered data key
* @param [locationObj.blockId] - blockId of the part,
* set by the Azure Blob Service REST API frontend
*/
constructor(locationObj: Location | (Location & Ciphered)) {
this._data = {
@ -50,8 +43,6 @@ export default class ObjectMDLocation {
size: locationObj.size,
dataStoreName: locationObj.dataStoreName,
dataStoreETag: locationObj.dataStoreETag,
dataStoreVersionId: locationObj.dataStoreVersionId,
blockId: locationObj.blockId,
};
if ('cryptoScheme' in locationObj) {
this._data.cryptoScheme = locationObj.cryptoScheme;
@ -73,7 +64,6 @@ export default class ObjectMDLocation {
* @param location - single data location info
* @param location.key - data backend key
* @param location.dataStoreName - type of data store
* @param [location.dataStoreVersionId] - data backend version ID
* @param [location.cryptoScheme] - if location data is
* encrypted: the encryption scheme version
* @param [location.cipheredDataKey] - if location data
@ -81,19 +71,15 @@ export default class ObjectMDLocation {
* @return return this
*/
setDataLocation(location: BaseLocation | (BaseLocation & Ciphered)) {
[
'key',
'dataStoreName',
'dataStoreVersionId',
'cryptoScheme',
'cipheredDataKey',
].forEach(attrName => {
if (location[attrName] !== undefined) {
this._data[attrName] = location[attrName];
} else {
delete this._data[attrName];
}
});
['key', 'dataStoreName', 'cryptoScheme', 'cipheredDataKey'].forEach(
(attrName) => {
if (location[attrName] !== undefined) {
this._data[attrName] = location[attrName];
} else {
delete this._data[attrName];
}
}
);
return this;
}
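Note: in both variants the whitelist semantics are the same: listed attributes present on the argument are copied, listed attributes absent from it are deleted. A sketch (values illustrative):

const loc = new ObjectMDLocation({
    key: 'k1', start: 0, size: 10,
    dataStoreName: 'file', dataStoreETag: '1:0123456789abcdef',
    dataStoreVersionId: 'v1',
});
loc.setDataLocation({ key: 'k2', dataStoreName: 'file' });
loc.getValue().key; // 'k2'
'cryptoScheme' in loc.getValue(); // false: absent attributes were deleted
loc.getPartNumber(); // 1, parsed from the dataStoreETag prefix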
@ -101,10 +87,6 @@ export default class ObjectMDLocation {
return this._data.dataStoreETag;
}
getDataStoreVersionId() {
return this._data.dataStoreVersionId;
}
getPartNumber() {
return Number.parseInt(this._data.dataStoreETag.split(':')[0], 10);
}
@ -139,15 +121,6 @@ export default class ObjectMDLocation {
return this._data.cipheredDataKey;
}
getBlockId() {
return this._data.blockId;
}
setBlockId(blockId: string) {
this._data.blockId = blockId;
return this;
}
getValue() {
return this._data;
}


@ -64,8 +64,7 @@ export default class ReplicationConfiguration {
_destination: string | null;
_rules: Rule[] | null;
_prevStorageClass: null;
_hasScalityDestination: boolean | null;
_hasScalityDestination: boolean;
_preferredReadLocation: string | null;
/**
* Create a ReplicationConfiguration instance
@ -87,8 +86,7 @@ export default class ReplicationConfiguration {
this._destination = null;
this._rules = null;
this._prevStorageClass = null;
this._hasScalityDestination = null;
this._hasScalityDestination = false;
this._preferredReadLocation = null;
}
/**
@ -115,18 +113,6 @@ export default class ReplicationConfiguration {
return this._rules;
}
/**
* The preferred read location
* @return {string|null} - The preferred read location if defined,
* otherwise null
*
* FIXME ideally we should be able to specify one preferred read
* location for each rule
*/
getPreferredReadLocation() {
return this._preferredReadLocation;
}
/**
* Get the replication configuration
* @return - The replication configuration
@ -136,7 +122,6 @@ export default class ReplicationConfiguration {
role: this.getRole(),
destination: this.getDestination(),
rules: this.getRules(),
preferredReadLocation: this.getPreferredReadLocation(),
};
}
@ -343,15 +328,7 @@ export default class ReplicationConfiguration {
return undefined;
}
const storageClasses = destination.StorageClass[0].split(',');
const prefReadIndex = storageClasses.findIndex(storageClass =>
storageClass.endsWith(':preferred_read'));
if (prefReadIndex !== -1) {
const prefRead = storageClasses[prefReadIndex].split(':')[0];
// remove :preferred_read tag from storage class name
storageClasses[prefReadIndex] = prefRead;
this._preferredReadLocation = prefRead;
}
const isValidStorageClass = storageClasses.every(storageClass => {
const isValidStorageClass = storageClasses.every((storageClass) => {
if (validStorageClasses.includes(storageClass)) {
this._hasScalityDestination =
defaultEndpoint.type === undefined;
@ -361,11 +338,6 @@ export default class ReplicationConfiguration {
(endpoint: any) => endpoint.site === storageClass
);
if (endpoint) {
// We do not support replication to cold location.
// Only transition to cold location is supported.
if (endpoint.site && this._config.locationConstraints[endpoint.site]?.isCold) {
return false;
}
// If this._hasScalityDestination was not set to true in any
// previous iteration or by a prior rule's storage class, then
// check if the current endpoint is a Scality destination.


@ -1,16 +1,11 @@
export { default as ARN } from './ARN';
export { default as BackendInfo } from './BackendInfo';
export { default as BucketAzureInfo } from './BucketAzureInfo';
export { default as BucketInfo } from './BucketInfo';
export { default as BucketPolicy } from './BucketPolicy';
export { default as LifecycleConfiguration } from './LifecycleConfiguration';
export { default as LifecycleRule } from './LifecycleRule';
export { default as NotificationConfiguration } from './NotificationConfiguration';
export { default as ObjectLockConfiguration } from './ObjectLockConfiguration';
export { default as ObjectMD } from './ObjectMD';
export { default as ObjectMDAmzRestore } from './ObjectMDAmzRestore';
export { default as ObjectMDArchive } from './ObjectMDArchive';
export { default as ObjectMDAzureInfo } from './ObjectMDAzureInfo';
export { default as ObjectMDLocation } from './ObjectMDLocation';
export { default as ReplicationConfiguration } from './ReplicationConfiguration';
export * as WebsiteConfiguration from './WebsiteConfiguration';
export { default as ARN } from './ARN';
export { default as BucketInfo } from './BucketInfo';
export { default as ObjectMD } from './ObjectMD';
export { default as ObjectMDLocation } from './ObjectMDLocation';
export * as WebsiteConfiguration from './WebsiteConfiguration';
export { default as ReplicationConfiguration } from './ReplicationConfiguration';
export { default as LifecycleConfiguration } from './LifecycleConfiguration';
export { default as LifecycleRule } from './LifecycleRule';
export { default as BucketPolicy } from './BucketPolicy';
export { default as ObjectLockConfiguration } from './ObjectLockConfiguration';
export { default as NotificationConfiguration } from './NotificationConfiguration';


@ -1,6 +1,5 @@
import * as http from 'http';
import * as https from 'https';
import { https as HttpsAgent } from 'httpagent';
import * as tls from 'tls';
import * as net from 'net';
import assert from 'assert';
@ -372,8 +371,6 @@ export default class Server {
error: err.stack || err,
address: sock.address(),
});
// socket is not systematically destroyed
sock.destroy();
}
/**
@ -410,11 +407,7 @@ export default class Server {
method: 'arsenal.network.Server.start',
port: this._port,
});
this._https.agent = new HttpsAgent.Agent(this._https, {
// Do not enforce the maximum number of sockets for the
// main server, as it might be able to serve more clients.
maxSockets: false,
});
this._https.agent = new https.Agent(this._https);
this._server = https.createServer(this._https,
(req, res) => this._onRequest(req, res));
} else {
@ -435,6 +428,7 @@ export default class Server {
this._server.on('connection', sock => {
// Setting no delay of the socket to the value configured
// TODO fix this
// @ts-expect-errors
sock.setNoDelay(this.isNoDelay());
sock.on('error', err => this._logger.info(
'socket error - request rejected', { error: err }));


@ -3,12 +3,10 @@ import * as utils from './http/utils';
import RESTServer from './rest/RESTServer';
import RESTClient from './rest/RESTClient';
import * as ProbeServer from './probe/ProbeServer';
import HealthProbeServer from './probe/HealthProbeServer';
import * as Utils from './probe/Utils';
export const http = { server, utils };
export const rest = { RESTServer, RESTClient };
export const probe = { ProbeServer, HealthProbeServer, Utils };
export const probe = { ProbeServer };
export { default as RoundRobin } from './RoundRobin';
export { default as kmip } from './kmip';


@ -1,94 +0,0 @@
import * as http from 'http';
import httpServer from '../http/server';
import * as werelogs from 'werelogs';
import errors from '../../errors';
import ZenkoMetrics from '../../metrics/ZenkoMetrics';
import { sendSuccess, sendError } from './Utils';
function checkStub(_log: any) {
// eslint-disable-line
return true;
}
export default class HealthProbeServer extends httpServer {
logging: werelogs.Logger;
_reqHandlers: { [key: string]: any };
_livenessCheck: (log: any) => boolean;
_readinessCheck: (log: any) => boolean;
constructor(params: {
port: number;
bindAddress: string;
livenessCheck?: (log: any) => boolean;
readinessCheck?: (log: any) => boolean;
}) {
const logging = new werelogs.Logger('HealthProbeServer');
super(params.port, logging);
this.logging = logging;
this.setBindAddress(params.bindAddress || 'localhost');
// hooking our request processing function by calling the
// parent's method for that
this.onRequest(this._onRequest);
this._reqHandlers = {
'/_/health/liveness': this._onLiveness.bind(this),
'/_/health/readiness': this._onReadiness.bind(this),
'/_/monitoring/metrics': this._onMetrics.bind(this),
};
this._livenessCheck = params.livenessCheck || checkStub;
this._readinessCheck = params.readinessCheck || checkStub;
}
onLiveCheck(f: (log: any) => boolean) {
this._livenessCheck = f;
}
onReadyCheck(f: (log: any) => boolean) {
this._readinessCheck = f;
}
_onRequest(req: http.IncomingMessage, res: http.ServerResponse) {
const log = this.logging.newRequestLogger();
log.debug('request received', { method: req.method, url: req.url });
if (req.method !== 'GET') {
sendError(res, log, errors.MethodNotAllowed);
} else if (req.url && req.url in this._reqHandlers) {
this._reqHandlers[req.url](req, res, log);
} else {
sendError(res, log, errors.InvalidURI);
}
}
_onLiveness(
_req: http.IncomingMessage,
res: http.ServerResponse,
log: werelogs.RequestLogger,
) {
if (this._livenessCheck(log)) {
sendSuccess(res, log);
} else {
sendError(res, log, errors.ServiceUnavailable);
}
}
_onReadiness(
_req: http.IncomingMessage,
res: http.ServerResponse,
log: werelogs.RequestLogger,
) {
if (this._readinessCheck(log)) {
sendSuccess(res, log);
} else {
sendError(res, log, errors.ServiceUnavailable);
}
}
// expose metrics to Prometheus
async _onMetrics(_req: http.IncomingMessage, res: http.ServerResponse) {
const metrics = await ZenkoMetrics.asPrometheus();
res.writeHead(200, {
'Content-Type': ZenkoMetrics.asPrometheusContentType(),
});
res.end(metrics);
}
}
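Note: typical wiring of this class (port, bindAddress and the backendIsReady/shuttingDown flags are hypothetical):

const probeServer = new HealthProbeServer({
    port: 8500,
    bindAddress: '0.0.0.0',
    readinessCheck: log => backendIsReady, // any (log) => boolean predicate
});
probeServer.onLiveCheck(log => !shuttingDown); // checks can also be swapped later
probeServer.start(); // inherited from httpServer; serves the three routes above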


@ -4,16 +4,19 @@ import * as werelogs from 'werelogs';
import errors from '../../errors';
export const DEFAULT_LIVE_ROUTE = '/_/live';
export const DEFAULT_READY_ROUTE = '/_/ready';
export const DEFAULT_READY_ROUTE = '/_/live';
export const DEFAULT_METRICS_ROUTE = '/metrics';
export const DEFAULT_METRICS_ROUTE = '/_/metrics';
/**
* ProbeDelegate is used to handle probe checks.
* You can sendSuccess and sendError from Utils to handle success
* and failure conditions.
* ProbeDelegate is used to determine if a probe is successful or
* if any errors are present.
* If everything is working as intended, it is a no-op.
* Otherwise, return a string representing what is failing.
* @callback ProbeDelegate
* @param res - HTTP response for writing
* @param log - Werelogs instance for logging if you choose to
* @return String representing issues to report. An empty
* string or undefined is used to represent no issues.
*/
export type ProbeDelegate = (res: http.ServerResponse, log: werelogs.RequestLogger) => string | void
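Note: a minimal delegate against the first docstring variant above, using the sendSuccess/sendError helpers it mentions; under the second variant the delegate would instead return a failure string. The backendIsReady flag is hypothetical:

const readyProbe: ProbeDelegate = (res, log) => {
    if (!backendIsReady) {
        return sendError(res, log, errors.ServiceUnavailable, 'backend down');
    }
    return sendSuccess(res, log);
};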
@ -87,6 +90,6 @@ export class ProbeServer extends httpServer {
return;
}
this._handlers.get(req.url ?? '')?.(res, log);
this._handlers.get(req.url!)!(res, log);
}
}


@ -1,49 +0,0 @@
import * as http from 'http';
import { RequestLogger } from 'werelogs';
import { ArsenalError } from '../../errors';
/**
* Send a successful HTTP response of 200 OK
* @param res - HTTP response for writing
* @param log - Werelogs instance for logging if you choose to
* @param [message] - Message to send as response, defaults to OK
*/
export function sendSuccess(
res: http.ServerResponse,
log: RequestLogger,
message = 'OK'
) {
log.debug('replying with success');
res.writeHead(200);
res.end(message);
}
/**
* Send an Arsenal Error response
* @param res - HTTP response for writing
* @param log - Werelogs instance for logging if you choose to
* @param error - Error to send back to the user
* @param [optMessage] - Message to use instead of the errors message
*/
export function sendError(
res: http.ServerResponse,
log: RequestLogger,
error: ArsenalError,
optMessage?: string
) {
const message = optMessage || error.description || '';
log.debug('sending back error response', {
httpCode: error.code,
errorType: error.message,
error: message,
});
res.writeHead(error.code);
res.end(
JSON.stringify({
errorType: error.message,
errorMessage: message,
})
);
}


@ -4,7 +4,7 @@ import * as werelogs from 'werelogs';
import * as constants from '../../constants';
import * as utils from './utils';
import errors, { ArsenalError } from '../../errors';
import { http as HttpAgent } from 'httpagent';
import HttpAgent from 'agentkeepalive';
import * as stream from 'stream';
function setRequestUids(reqHeaders: http.IncomingHttpHeaders, reqUids: string) {
@ -71,9 +71,8 @@ function makeErrorFromHTTPResponse(response: http.IncomingMessage) {
export default class RESTClient {
host: string;
port: number;
httpAgent: http.Agent;
httpAgent: HttpAgent;
logging: werelogs.Logger;
isPassthrough: boolean;
/**
* Interface to the data file server
@ -89,19 +88,17 @@ export default class RESTClient {
host: string;
port: number;
logApi: { Logger: typeof werelogs.Logger };
isPassthrough?: boolean;
}) {
assert(params.host);
assert(params.port);
this.host = params.host;
this.port = params.port;
this.isPassthrough = params.isPassthrough || false;
this.logging = new (params.logApi || werelogs).Logger('DataFileRESTClient');
this.httpAgent = new HttpAgent.Agent({
this.httpAgent = new HttpAgent({
keepAlive: true,
freeSocketTimeout: constants.httpClientFreeSocketTimeout,
}) as http.Agent;
});
}
/** Destroy the HTTP agent, forcing a close of the remaining open connections */
@ -124,13 +121,11 @@ export default class RESTClient {
) {
const reqHeaders = headers || {};
const urlKey = key || '';
const prefix = this.isPassthrough ?
constants.passthroughFileURL : constants.dataFileURL;
const reqParams = {
hostname: this.host,
port: this.port,
method,
path: encodeURI(`${prefix}/${urlKey}`),
path: `${constants.dataFileURL}/${urlKey}`,
headers: reqHeaders,
agent: this.httpAgent,
};


@ -4,7 +4,7 @@ import * as werelogs from 'werelogs';
import * as http from 'http';
import httpServer from '../http/server';
import * as constants from '../../constants';
import { parseURL } from './utils';
import * as utils from './utils';
import * as httpUtils from '../http/utils';
import errors, { ArsenalError } from '../../errors';
@ -38,6 +38,42 @@ function sendError(
errorMessage: message })}\n`);
}
/**
* Parse the given url and return a pathInfo object. Sanity checks are
* performed.
*
* @param urlStr - URL to parse
* @param expectKey - whether the command expects to see a
* key in the URL
* @return a pathInfo object with URL items containing the
* following attributes:
* - pathInfo.service {String} - The name of REST service ("DataFile")
* - pathInfo.key {String} - The requested key
*/
function parseURL(urlStr: string, expectKey: boolean) {
const urlObj = url.parse(urlStr);
const pathInfo = utils.explodePath(urlObj.path!);
if (pathInfo.service !== constants.dataFileURL) {
throw errors.InvalidAction.customizeDescription(
`unsupported service '${pathInfo.service}'`);
}
if (expectKey && pathInfo.key === undefined) {
throw errors.MissingParameter.customizeDescription(
'URL is missing key');
}
if (!expectKey && pathInfo.key !== undefined) {
// note: we may implement rewrite functionality by allowing a
// key in the URL, though we may still provide the new key in
// the Location header to keep immutability property and
// atomicity of the update (we would just remove the old
// object when the new one has been written entirely in this
// case, saving a request over an equivalent PUT + DELETE).
throw errors.InvalidURI.customizeDescription(
'PUT url cannot contain a key');
}
return pathInfo;
}
/**
* @class
* @classdesc REST Server interface


@ -1,16 +1,6 @@
import errors from '../../errors';
import * as constants from '../../constants';
import * as url from 'url';
const passthroughPrefixLength = constants.passthroughFileURL.length;
export function explodePath(path: string) {
if (path.startsWith(constants.passthroughFileURL)) {
const key = path.slice(passthroughPrefixLength + 1);
return {
service: constants.passthroughFileURL,
key: key.length > 0 ? key : undefined,
};
}
const pathMatch = /^(\/[a-zA-Z0-9]+)(\/([0-9a-f]*))?$/.exec(path);
if (pathMatch) {
return {
@ -20,41 +10,4 @@ export function explodePath(path: string) {
};
}
throw errors.InvalidURI.customizeDescription('malformed URI');
}
};
/**
* Parse the given url and return a pathInfo object. Sanity checks are
* performed.
*
* @param urlStr - URL to parse
* @param expectKey - whether the command expects to see a
* key in the URL
* @return a pathInfo object with URL items containing the
* following attributes:
* - pathInfo.service {String} - The name of REST service ("DataFile")
* - pathInfo.key {String} - The requested key
*/
export function parseURL(urlStr: string, expectKey: boolean) {
const urlObj = url.parse(urlStr);
const pathInfo = explodePath(decodeURI(urlObj.path!));
if ((pathInfo.service !== constants.dataFileURL)
&& (pathInfo.service !== constants.passthroughFileURL)) {
throw errors.InvalidAction.customizeDescription(
`unsupported service '${pathInfo.service}'`);
}
if (expectKey && pathInfo.key === undefined) {
throw errors.MissingParameter.customizeDescription(
'URL is missing key');
}
if (!expectKey && pathInfo.key !== undefined) {
// note: we may implement rewrite functionality by allowing a
// key in the URL, though we may still provide the new key in
// the Location header to keep immutability property and
// atomicity of the update (we would just remove the old
// object when the new one has been written entirely in this
// case, saving a request over an equivalent PUT + DELETE).
throw errors.InvalidURI.customizeDescription(
'PUT url cannot contain a key');
}
return pathInfo;
}
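Note: shapes accepted by parseURL, assuming constants.dataFileURL is '/DataFile' (an assumption, not shown in this diff):

parseURL('/DataFile/0123abcd', true); // { service: '/DataFile', key: '0123abcd' }
parseURL('/DataFile', false); // { service: '/DataFile', key: undefined }
parseURL('/DataFile', true); // throws MissingParameter: URL is missing key
parseURL('/DataFile/0123abcd', false); // throws InvalidURI: PUT url cannot contain a key
parseURL('/Unknown/0123abcd', true); // throws InvalidAction: unsupported service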


@ -1,209 +0,0 @@
import { URL } from 'url';
import { decryptSecret } from '../executables/pensieveCreds/utils';
import { Logger } from 'werelogs';
export type LocationType =
| 'location-mem-v1'
| 'location-file-v1'
| 'location-azure-v1'
| 'location-ceph-radosgw-s3-v1'
| 'location-scality-ring-s3-v1'
| 'location-aws-s3-v1'
| 'location-wasabi-v1'
| 'location-do-spaces-v1'
| 'location-gcp-v1'
| 'location-scality-sproxyd-v1'
| 'location-nfs-mount-v1'
| 'location-scality-hdclient-v2';
export interface OverlayLocations {
[key: string]: {
name: string;
objectId: string;
details?: any;
locationType: string;
sizeLimitGB?: number;
isTransient?: boolean;
legacyAwsBehavior?: boolean;
};
}
export type Location = {
type:
| 'mem'
| 'file'
| 'azure'
| 'aws_s3'
| 'gcp'
| 'scality'
| 'pfs'
| 'scality';
name: string;
objectId: string;
details: { [key: string]: any };
locationType: string;
sizeLimitGB: number | null;
isTransient: boolean;
legacyAwsBehavior: boolean;
};
export function patchLocations(
overlayLocations: OverlayLocations | undefined | null,
creds: any,
log: Logger
) {
const locs = overlayLocations ?? {};
return Object.entries(locs).reduce<{ [key: string]: Location }>(
(acc, [k, l]) => {
const location: Location = {
type: 'mem',
name: k,
objectId: l.objectId,
details: l.details || {},
locationType: l.locationType,
sizeLimitGB: l.sizeLimitGB || null,
isTransient: Boolean(l.isTransient),
legacyAwsBehavior: Boolean(l.legacyAwsBehavior),
};
let supportsVersioning = false;
let pathStyle = process.env.CI_CEPH !== undefined;
switch (l.locationType) {
case 'location-mem-v1':
location.type = 'mem';
location.details = { supportsVersioning: true };
break;
case 'location-file-v1':
location.type = 'file';
location.details = { supportsVersioning: true };
break;
case 'location-azure-v1':
location.type = 'azure';
if (l.details.secretKey && l.details.secretKey.length > 0) {
location.details = {
bucketMatch: l.details.bucketMatch,
azureStorageEndpoint: l.details.endpoint,
azureStorageAccountName: l.details.accessKey,
azureStorageAccessKey: decryptSecret(
creds,
l.details.secretKey
),
azureContainerName: l.details.bucketName,
};
}
break;
case 'location-ceph-radosgw-s3-v1':
case 'location-scality-ring-s3-v1':
pathStyle = true; // fallthrough
case 'location-aws-s3-v1':
case 'location-wasabi-v1':
supportsVersioning = true; // fallthrough
case 'location-do-spaces-v1':
location.type = 'aws_s3';
if (l.details.secretKey && l.details.secretKey.length > 0) {
let https = true;
let awsEndpoint =
l.details.endpoint || 's3.amazonaws.com';
if (awsEndpoint.includes('://')) {
const url = new URL(awsEndpoint);
awsEndpoint = url.host;
https = url.protocol.includes('https');
}
location.details = {
credentials: {
accessKey: l.details.accessKey,
secretKey: decryptSecret(
creds,
l.details.secretKey
),
},
bucketName: l.details.bucketName,
bucketMatch: l.details.bucketMatch,
serverSideEncryption: Boolean(
l.details.serverSideEncryption
),
region: l.details.region,
awsEndpoint,
supportsVersioning,
pathStyle,
https,
};
}
break;
case 'location-gcp-v1':
location.type = 'gcp';
if (l.details.secretKey && l.details.secretKey.length > 0) {
location.details = {
credentials: {
accessKey: l.details.accessKey,
secretKey: decryptSecret(
creds,
l.details.secretKey
),
},
bucketName: l.details.bucketName,
mpuBucketName: l.details.mpuBucketName,
bucketMatch: l.details.bucketMatch,
gcpEndpoint:
l.details.endpoint || 'storage.googleapis.com',
https: true,
};
}
break;
case 'location-scality-sproxyd-v1':
location.type = 'scality';
if (
l.details &&
l.details.bootstrapList &&
l.details.proxyPath
) {
location.details = {
supportsVersioning: true,
connector: {
sproxyd: {
chordCos: l.details.chordCos || null,
bootstrap: l.details.bootstrapList,
path: l.details.proxyPath,
},
},
};
}
break;
case 'location-nfs-mount-v1':
location.type = 'pfs';
if (l.details) {
location.details = {
supportsVersioning: true,
bucketMatch: true,
pfsDaemonEndpoint: {
host: `${l.name}-cosmos-pfsd`,
port: 80,
},
};
}
break;
case 'location-scality-hdclient-v2':
location.type = 'scality';
if (l.details && l.details.bootstrapList) {
location.details = {
supportsVersioning: true,
connector: {
hdclient: {
bootstrap: l.details.bootstrapList,
},
},
};
}
break;
default:
log.info('unknown location type', {
locationType: l.locationType,
});
return acc;
}
return { ...acc, [location.name]: location };
},
{}
);
}
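Note: a usage sketch for patchLocations (the overlay entry is illustrative; creds feeds decryptSecret and log is a werelogs Logger):

const locations = patchLocations({
    'us-east-1': {
        name: 'us-east-1',
        objectId: 'location-0001', // illustrative
        locationType: 'location-file-v1',
    },
}, creds, log);
// locations['us-east-1'] -> { type: 'file', details: { supportsVersioning: true }, ... }
// Entries with an unknown locationType are logged and skipped, not thrown.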


@ -38,10 +38,6 @@
"type": "string", "type": "string",
"pattern": "^arn:aws:iam::[0-9]{12}:saml-provider/[\\w._-]{1,128}$" "pattern": "^arn:aws:iam::[0-9]{12}:saml-provider/[\\w._-]{1,128}$"
}, },
"principalFederatedOidcIdp": {
"type": "string",
"pattern": "^(?:http(s)?:\/\/)?[\\w.-]+(?:\\.[\\w\\.-]+)+[\\w\\-\\._~:/?#[\\]@!\\$&'\\(\\)\\*\\+,;=.]+$"
},
"principalAWSItem": { "principalAWSItem": {
"type": "object", "type": "object",
"properties": { "properties": {
@ -102,9 +98,6 @@
"oneOf": [ "oneOf": [
{ {
"$ref": "#/definitions/principalFederatedSamlIdp" "$ref": "#/definitions/principalFederatedSamlIdp"
},
{
"$ref": "#/definitions/principalFederatedOidcIdp"
} }
]
}


@ -15,36 +15,11 @@ import {
actionMapScuba,
} from './utils/actionMaps';
export const actionNeedQuotaCheck = {
const _actionNeedQuotaCheck = {
objectPut: true,
objectPutVersion: true,
objectPutPart: true,
objectRestore: true,
};
/**
* This variable describes APIs that change the bytes
* stored, requiring quota updates
*/
export const actionWithDataDeletion = {
objectDelete: true,
objectDeleteVersion: true,
multipartDelete: true,
multiObjectDelete: true,
};
/**
* The function returns true if the current API call is a copy object
* and the action requires a quota evaluation logic, post retrieval
* of the object metadata.
* @param {string} action - the action being performed
* @param {string} currentApi - the current API being called
* @return {boolean} - whether the action requires a quota check
*/
export function actionNeedQuotaCheckCopy(action: string, currentApi: string) {
return action === 'objectGet' && (currentApi === 'objectCopy' || currentApi === 'objectPutCopyPart');
}
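Note: for instance:

actionNeedQuotaCheckCopy('objectGet', 'objectCopy'); // true: reading the copy source
actionNeedQuotaCheckCopy('objectGet', 'objectPutCopyPart'); // true
actionNeedQuotaCheckCopy('objectGet', 'objectGet'); // false: plain GET
actionNeedQuotaCheckCopy('objectPut', 'objectCopy'); // false: only objectGet qualifies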
function _findAction(service: string, method: string) {
switch (service) {
case 's3':
@ -151,9 +126,7 @@ export type RequesterInfo = {
principalType: string;
principaltype: string;
userid: string;
username: string;
username: string,
keycloakGroup: string;
keycloakRole: string;
}
/**
@ -256,8 +229,7 @@ export default class RequestContext {
this._securityToken = securityToken;
this._policyArn = policyArn;
this._action = action;
this._needQuota = actionNeedQuotaCheck[apiMethod] === true
|| actionWithDataDeletion[apiMethod] === true;
this._needQuota = _actionNeedQuotaCheck[apiMethod] === true;
this._requestObjTags = requestObjTags || null;
this._existingObjTag = existingObjTag || null;
this._needTagEval = needTagEval || false;


@ -33,7 +33,6 @@ const sharedActionMap = {
bypassGovernanceRetention: 's3:BypassGovernanceRetention',
listMultipartUploads: 's3:ListBucketMultipartUploads',
listParts: 's3:ListMultipartUploadParts',
metadataSearch: 's3:MetadataSearch',
multipartDelete: 's3:AbortMultipartUpload',
objectDelete: 's3:DeleteObject',
objectDeleteTagging: 's3:DeleteObjectTagging',
@ -48,14 +47,6 @@ const sharedActionMap = {
objectPutLegalHold: 's3:PutObjectLegalHold',
objectPutRetention: 's3:PutObjectRetention',
objectPutTagging: 's3:PutObjectTagging',
objectRestore: 's3:RestoreObject',
objectPutVersion: 's3:PutObjectVersion',
};
const actionMapBucketQuotas = {
bucketGetQuota: 'scality:GetBucketQuota',
bucketUpdateQuota: 'scality:UpdateBucketQuota',
bucketDeleteQuota: 'scality:DeleteBucketQuota',
};
// action map used for request context
@ -71,7 +62,6 @@ const actionMapRQ = {
initiateMultipartUpload: 's3:PutObject',
objectDeleteVersion: 's3:DeleteObjectVersion',
objectDeleteTaggingVersion: 's3:DeleteObjectVersionTagging',
objectGetArchiveInfo: 'scality:GetObjectArchiveInfo',
objectGetVersion: 's3:GetObjectVersion',
objectGetACLVersion: 's3:GetObjectVersionAcl',
objectGetTaggingVersion: 's3:GetObjectVersionTagging',
@ -80,13 +70,10 @@ const actionMapRQ = {
objectPutTaggingVersion: 's3:PutObjectVersionTagging',
serviceGet: 's3:ListAllMyBuckets',
objectReplicate: 's3:ReplicateObject',
objectGetRetentionVersion: 's3:GetObjectRetention',
objectPutRetentionVersion: 's3:PutObjectRetention',
objectGetLegalHoldVersion: 's3:GetObjectLegalHold',
objectPutLegalHoldVersion: 's3:PutObjectLegalHold',
objectPutRetentionVersion: 's3:PutObjectVersionRetention',
objectPutLegalHoldVersion: 's3:PutObjectVersionLegalHold',
listObjectVersions: 's3:ListBucketVersions',
...sharedActionMap,
...actionMapBucketQuotas,
};
// action map used for bucket policies
@ -138,7 +125,6 @@ const actionMonitoringMapS3 = {
initiateMultipartUpload: 'CreateMultipartUpload',
listMultipartUploads: 'ListMultipartUploads',
listParts: 'ListParts',
metadataSearch: 'MetadataSearch',
multiObjectDelete: 'DeleteObjects',
multipartDelete: 'AbortMultipartUpload',
objectCopy: 'CopyObject',
@ -157,17 +143,7 @@ const actionMonitoringMapS3 = {
objectPutPart: 'UploadPart',
objectPutRetention: 'PutObjectRetention',
objectPutTagging: 'PutObjectTagging',
objectRestore: 'RestoreObject',
serviceGet: 'ListBuckets',
bucketGetQuota: 'GetBucketQuota',
bucketUpdateQuota: 'UpdateBucketQuota',
bucketDeleteQuota: 'DeleteBucketQuota',
};
const actionMapAccountQuotas = {
UpdateAccountQuota : 'scality:UpdateAccountQuota',
DeleteAccountQuota : 'scality:DeleteAccountQuota',
GetAccountQuota : 'scality:GetAccountQuota',
};
const actionMapIAM = {
@ -192,7 +168,6 @@ const actionMapIAM = {
getPolicyVersion: 'iam:GetPolicyVersion',
getUser: 'iam:GetUser',
listAccessKeys: 'iam:ListAccessKeys',
listEntitiesForPolicy: 'iam:ListEntitiesForPolicy',
listGroupPolicies: 'iam:ListGroupPolicies',
listGroups: 'iam:ListGroups',
listGroupsForUser: 'iam:ListGroupsForUser',
@ -211,7 +186,6 @@ const actionMapIAM = {
tagUser: 'iam:TagUser',
unTagUser: 'iam:UntagUser',
listUserTags: 'iam:ListUserTags',
...actionMapAccountQuotas,
};
const actionMapSSO = {


@ -142,8 +142,6 @@ export function findConditionKey(
// header
case 's3:ObjLocationConstraint': return headers['x-amz-meta-scal-location-constraint'];
case 'sts:ExternalId': return requestContext.getRequesterExternalId();
case 'keycloak:groups': return requesterInfo.keycloakGroup;
case 'keycloak:roles': return requesterInfo.keycloakRole;
case 'iam:PolicyArn': return requestContext.getPolicyArn();
// s3:ExistingObjectTag - Used to check that existing object tag has
// specific tag key and value. Extraction of correct tag key is done in CloudServer.


@ -30,7 +30,7 @@ export default class ResultsCollector extends EventEmitter {
* @emits ResultCollector#done
* @emits ResultCollector#error
*/
pushResult(err: Error | null | undefined, subPartIndex: number) {
pushResult(err: Error | undefined, subPartIndex: number) {
this._results.push({
error: err,
subPartIndex,


@ -1,7 +1,6 @@
import assert from 'assert';
import * as crypto from 'crypto';
import * as stream from 'stream';
import azure from '@azure/storage-blob';
import { RequestLogger } from 'werelogs';
@ -9,7 +8,7 @@ import ResultsCollector from './ResultsCollector';
import SubStreamInterface from './SubStreamInterface';
import * as objectUtils from '../objectUtils';
import MD5Sum from '../MD5Sum';
import errors, { ArsenalError } from '../../errors';
import errors from '../../errors';
export const splitter = '|';
export const overviewMpuKey = 'azure_mpu';
@ -65,7 +64,7 @@ export const getBlockId = (
const paddedSubPart = padString(subPartIndex, 'subPart');
const blockId = `${uploadId}${splitter}partNumber${paddedPartNumber}` +
`${splitter}subPart${paddedSubPart}${splitter}`;
return Buffer.from(padString(blockId, 'part')).toString('base64');
return padString(blockId, 'part');
};
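Note: the block id shape produced above; the exact padding widths come from padString and are not shown in this diff:

// Conceptually: '<uploadId>|partNumber<padded>|subPart<padded>|', padded to a
// fixed width; one variant additionally base64-encodes the result. Azure
// requires every block id within a blob to have the same length.
const blockId = getBlockId('0f1e2d3c4b5a', 5, 0); // illustrative uploadId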
export const getSummaryPartId = (partNumber: number, eTag: string, size: number) => {
@ -104,17 +103,10 @@ export const getSubPartIds = (
) => [...Array(part.numberSubParts).keys()].map(subPartIndex =>
getBlockId(uploadId, part.partNumber, subPartIndex));
type ErrorWrapperFn = (
s3Method: string,
azureMethod: string,
command: (client: azure.ContainerClient) => Promise<any>,
log: RequestLogger,
cb: (err: ArsenalError | null | undefined) => void,
) => void
// TODO Better type this
export const putSinglePart = (
errorWrapperFn: ErrorWrapperFn,
errorWrapperFn: (first: string, second: string, third: any, log: any, cb: any) => void,
request: stream.Readable,
request: any,
params: {
bucketName: string;
partNumber: number;
@ -125,44 +117,44 @@ export const putSinglePart = (
},
dataStoreName: string,
log: RequestLogger,
cb: (err: ArsenalError | null | undefined, dataStoreETag?: string, size?: number) => void,
cb: any,
) => {
const { bucketName, partNumber, size, objectKey, contentMD5, uploadId }
= params;
const blockId = getBlockId(uploadId, partNumber, 0);
const passThrough = new stream.PassThrough();
const options = contentMD5
? { transactionalContentMD5: objectUtils.getMD5Buffer(contentMD5) }
? { useTransactionalMD5: true, transactionalContentMD5: contentMD5 }
: {};
request.pipe(passThrough);
return errorWrapperFn('uploadPart', 'createBlockFromStream', async client => {
try {
const result = await client.getBlockBlobClient(objectKey)
.stageBlock(blockId, () => passThrough, size, options);
const md5 = result.contentMD5 || '';
const eTag = objectUtils.getHexMD5(md5);
return eTag
} catch (err: any) {
log.error('Error from Azure data backend uploadPart',
{ error: err.message, dataStoreName });
if (err.code === 'ContainerNotFound') {
throw errors.NoSuchBucket;
}
if (err.code === 'InvalidMd5') {
throw errors.InvalidDigest;
}
if (err.code === 'Md5Mismatch') {
throw errors.BadDigest;
}
throw errors.InternalError.customizeDescription(
`Error returned from Azure: ${err.message}`
);
}
}, log, cb);
};
return errorWrapperFn('uploadPart', 'createBlockFromStream',
[blockId, bucketName, objectKey, passThrough, size, options,
(err: any | null, result: any) => {
if (err) {
log.error('Error from Azure data backend uploadPart',
{ error: err.message, dataStoreName });
if (err.code === 'ContainerNotFound') {
return cb(errors.NoSuchBucket);
}
if (err.code === 'InvalidMd5') {
return cb(errors.InvalidDigest);
}
if (err.code === 'Md5Mismatch') {
return cb(errors.BadDigest);
}
return cb(errors.InternalError.customizeDescription(
`Error returned from Azure: ${err.message}`),
);
}
const md5 = result.headers['content-md5'] || '';
const eTag = objectUtils.getHexMD5(md5);
return cb(null, eTag, size);
}], log, cb);
};
const putNextSubPart = ( // TODO type this
errorWrapperFn: ErrorWrapperFn, export const putNextSubPart = (
errorWrapperFn: any,
partParams: { partParams: {
uploadId: string; uploadId: string;
partNumber: number; partNumber: number;
@ -170,10 +162,11 @@ const putNextSubPart = (
objectKey: string; objectKey: string;
}, },
subPartInfo: { lastPartIndex: number; lastPartSize: number }, subPartInfo: { lastPartIndex: number; lastPartSize: number },
subPartStream: stream.Readable, subPartStream: any,
subPartIndex: number, subPartIndex: number,
resultsCollector: ResultsCollector, resultsCollector: ResultsCollector,
log: RequestLogger, log: RequestLogger,
cb: any,
) => { ) => {
const { uploadId, partNumber, bucketName, objectKey } = partParams; const { uploadId, partNumber, bucketName, objectKey } = partParams;
const subPartSize = getSubPartSize( const subPartSize = getSubPartSize(
@ -181,20 +174,14 @@ const putNextSubPart = (
const subPartId = getBlockId(uploadId, partNumber, const subPartId = getBlockId(uploadId, partNumber,
subPartIndex); subPartIndex);
resultsCollector.pushOp(); resultsCollector.pushOp();
errorWrapperFn('uploadPart', 'createBlockFromStream', async client => { errorWrapperFn('uploadPart', 'createBlockFromStream',
try { [subPartId, bucketName, objectKey, subPartStream, subPartSize,
const result = await client.getBlockBlobClient(objectKey) {}, err => resultsCollector.pushResult(err, subPartIndex)], log, cb);
.stageBlock(subPartId, () => subPartStream, subPartSize, {});
resultsCollector.pushResult(null, subPartIndex);
} catch (err: any) {
resultsCollector.pushResult(err, subPartIndex);
}
}, log, () => {});
}; };
export const putSubParts = ( export const putSubParts = (
errorWrapperFn: ErrorWrapperFn, errorWrapperFn: any,
request: stream.Readable, request: any,
params: { params: {
uploadId: string; uploadId: string;
partNumber: number; partNumber: number;
@ -204,7 +191,7 @@ export const putSubParts = (
}, },
dataStoreName: string, dataStoreName: string,
log: RequestLogger, log: RequestLogger,
cb: (err: ArsenalError | null | undefined, dataStoreETag?: string) => void, cb: any,
) => { ) => {
const subPartInfo = getSubPartInfo(params.size); const subPartInfo = getSubPartInfo(params.size);
const resultsCollector = new ResultsCollector(); const resultsCollector = new ResultsCollector();
@ -243,13 +230,14 @@ export const putSubParts = (
const totalLength = streamInterface.getTotalBytesStreamed(); const totalLength = streamInterface.getTotalBytesStreamed();
log.trace('successfully put subparts to Azure', log.trace('successfully put subparts to Azure',
{ numberSubParts, totalLength }); { numberSubParts, totalLength });
hashedStream.on('hashed', () => cb(null, hashedStream.completedHash)); hashedStream.on('hashed', () => cb(null, hashedStream.completedHash,
totalLength));
// in case the hashed event was already emitted before the // in case the hashed event was already emitted before the
// event handler was registered: // event handler was registered:
if (hashedStream.completedHash) { if (hashedStream.completedHash) {
hashedStream.removeAllListeners('hashed'); hashedStream.removeAllListeners('hashed');
return cb(null, hashedStream.completedHash); return cb(null, hashedStream.completedHash, totalLength);
} }
return undefined; return undefined;
}); });
@ -257,7 +245,7 @@ export const putSubParts = (
const currentStream = streamInterface.getCurrentStream(); const currentStream = streamInterface.getCurrentStream();
// start first put to Azure before we start streaming the data // start first put to Azure before we start streaming the data
putNextSubPart(errorWrapperFn, params, subPartInfo, putNextSubPart(errorWrapperFn, params, subPartInfo,
currentStream, 0, resultsCollector, log); currentStream, 0, resultsCollector, log, cb);
request.pipe(hashedStream); request.pipe(hashedStream);
hashedStream.on('end', () => { hashedStream.on('end', () => {
@ -277,8 +265,8 @@ export const putSubParts = (
} }
const { nextStream, subPartIndex } = const { nextStream, subPartIndex } =
streamInterface.transitionToNextStream(); streamInterface.transitionToNextStream();
putNextSubPart(errorWrapperFn, params, subPartInfo, nextStream, putNextSubPart(errorWrapperFn, params, subPartInfo,
subPartIndex, resultsCollector, log); nextStream, subPartIndex, resultsCollector, log, cb);
streamInterface.write(firstChunk); streamInterface.write(firstChunk);
} else { } else {
streamInterface.write(data); streamInterface.write(data);

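The two sides of getBlockId differ in one step: the base branch base64-encodes the padded block ID before returning it. Azure Blob Storage requires block IDs within a blob to be base64 strings of equal decoded length, which is why the raw ID is padded to a fixed width first. A minimal sketch of that construction, with the pad widths assumed for illustration (the real padString is not shown in this hunk):

    // Sketch only -- pad widths are assumed, not taken from the real padString.
    const splitter = '|';
    const pad = (s: string | number, width: number, filler: string) =>
        String(s).padStart(width, filler);
    const getBlockId = (uploadId: string, partNumber: number, subPartIndex: number) => {
        const blockId = `${uploadId}${splitter}partNumber${pad(partNumber, 5, '0')}` +
            `${splitter}subPart${pad(subPartIndex, 3, '0')}${splitter}`;
        // Right-pad to a fixed length so every encoded ID decodes to the same size,
        // then base64-encode as Azure requires (base-branch behaviour).
        return Buffer.from(blockId.padEnd(64, ' ')).toString('base64');
    };
    console.log(getBlockId('upload-1', 2, 0)); // fixed-length base64 block ID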

@@ -1,25 +1,19 @@
-import { scaleMsPerDay } from '../objectUtils';
-
-const msInOneDay = 24 * 60 * 60 * 1000; // Milliseconds in a day.
+const oneDay = 24 * 60 * 60 * 1000; // Milliseconds in a day.
 
 export default class LifecycleDateTime {
     _transitionOneDayEarlier?: boolean;
     _expireOneDayEarlier?: boolean;
-    _timeProgressionFactor?: number;
-    _scaledMsPerDay: number;
 
     constructor(params?: {
         transitionOneDayEarlier: boolean;
         expireOneDayEarlier: boolean;
-        timeProgressionFactor: number;
     }) {
         this._transitionOneDayEarlier = params?.transitionOneDayEarlier;
         this._expireOneDayEarlier = params?.expireOneDayEarlier;
-        this._timeProgressionFactor = params?.timeProgressionFactor || 1;
-        this._scaledMsPerDay = scaleMsPerDay(this._timeProgressionFactor);
     }
 
     getCurrentDate() {
-        const timeTravel = this._expireOneDayEarlier ? msInOneDay : 0;
+        const timeTravel = this._expireOneDayEarlier ? oneDay : 0;
         return Date.now() + timeTravel;
     }
@@ -31,7 +25,7 @@ export default class LifecycleDateTime {
     findDaysSince(date: Date) {
         const now = this.getCurrentDate();
         const diff = now - date.getTime();
-        return Math.floor(diff / this._scaledMsPerDay);
+        return Math.floor(diff / (1000 * 60 * 60 * 24));
     }
 
     /**
@@ -58,25 +52,8 @@ export default class LifecycleDateTime {
         }
         if (transition.Days !== undefined) {
             const lastModifiedTime = this.getTimestamp(lastModified);
-            const timeTravel = this._transitionOneDayEarlier ? -msInOneDay : 0;
-            return lastModifiedTime + (transition.Days * this._scaledMsPerDay) + timeTravel;
-        }
-    }
-
-    /**
-     * Find the Unix time at which the non-current version transition should occur.
-     * @param transition - A non-current version transition from the lifecycle non-current version transitions
-     * @param lastModified - The object's last modified date
-     * @return - The normalized transition timestamp
-     */
-    getNCVTransitionTimestamp(
-        transition: { NoncurrentDays?: number },
-        lastModified: string,
-    ) {
-        if (transition.NoncurrentDays !== undefined) {
-            const lastModifiedTime = this.getTimestamp(lastModified);
-            const timeTravel = this._transitionOneDayEarlier ? -msInOneDay : 0;
-            return lastModifiedTime + (transition.NoncurrentDays * this._scaledMsPerDay) + timeTravel;
+            const timeTravel = this._transitionOneDayEarlier ? -oneDay : 0;
+            return lastModifiedTime + (transition.Days * oneDay) + timeTravel;
         }
     }
 }

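The fields removed here exist to let tests compress time: scaleMsPerDay (defined further down, in objectUtils) divides the real length of a day by a timeProgressionFactor, and findDaysSince then counts those scaled days. A small worked example of the base-branch behaviour:

    // With a factor of 24, one real hour counts as one lifecycle "day".
    const msInOneDay = 24 * 60 * 60 * 1000;
    const scaleMsPerDay = (timeProgressionFactor: number): number =>
        Math.round(msInOneDay / (timeProgressionFactor || 1)) || 1;

    const scaledMsPerDay = scaleMsPerDay(24);          // 3600000 ms
    const elapsed = 2 * 60 * 60 * 1000;                // two real hours
    console.log(Math.floor(elapsed / scaledMsPerDay)); // 2 scaled "days"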

@@ -61,47 +61,6 @@ export default class LifecycleUtils {
         return trans1 > trans2 ? transition1 : transition2;
     }
 
-    /**
-     * Compare two non-current version transition rules and return the one that is most recent.
-     * @param params - The function parameters
-     * @param params.transition1 - A non-current version transition from the current rule
-     * @param params.transition2 - A non-current version transition from the previous rule
-     * @param params.lastModified - The object's last modified
-     * date
-     * @return The most applicable transition rule
-     */
-    compareNCVTransitions(params: {
-        lastModified: string;
-        transition1: any;
-        transition2?: any;
-    }): number | undefined;
-    compareNCVTransitions(params: {
-        lastModified: string;
-        transition1?: any;
-        transition2: any;
-    }): number | undefined;
-    compareNCVTransitions(params: {
-        lastModified: string;
-        transition1: any;
-        transition2: any;
-    }): number | undefined;
-    compareNCVTransitions(params: {
-        lastModified: string;
-        transition1?: any;
-        transition2?: any;
-    }) {
-        const { transition1, transition2, lastModified } = params;
-        if (transition1 === undefined) {
-            return transition2;
-        }
-        if (transition2 === undefined) {
-            return transition1;
-        }
-        const trans1 = this._datetime.getNCVTransitionTimestamp(transition1!, lastModified)!;
-        const trans2 = this._datetime.getNCVTransitionTimestamp(transition2!, lastModified)!;
-        return trans1 > trans2 ? transition1 : transition2;
-    }
-
     // TODO Fix This
     /**
      * Find the most relevant trantition rule for the given transitions array
@@ -139,42 +98,6 @@ export default class LifecycleUtils {
         });
     }
 
-    /**
-     * Find the most relevant non-current version transition rule for the given transitions array
-     * and any previously stored non-current version transition from another rule.
-     * @param params - The function parameters
-     * @param params.transitions - Array of lifecycle non-current version transitions
-     * @param params.lastModified - The object's last modified
-     * date
-     * @return The most applicable non-current version transition rule
-     */
-    getApplicableNCVTransition(params: {
-        store: any;
-        currentDate: Date;
-        transitions: any[];
-        lastModified: string;
-    }) {
-        const { transitions, store, lastModified, currentDate } = params;
-        const transition = transitions.reduce((result, transition) => {
-            const isApplicable = // Is the transition time in the past?
-                this._datetime.getTimestamp(currentDate) >=
-                this._datetime.getNCVTransitionTimestamp(transition, lastModified)!;
-            if (!isApplicable) {
-                return result;
-            }
-            return this.compareNCVTransitions({
-                transition1: transition,
-                transition2: result,
-                lastModified,
-            });
-        }, undefined);
-        return this.compareNCVTransitions({
-            transition1: transition,
-            transition2: store.NoncurrentVersionTransition,
-            lastModified,
-        });
-    }
-
     // TODO
     /**
      * Filter out all rules based on `Status` and `Filter` (Prefix and Tags)
@@ -318,17 +241,7 @@ export default class LifecycleUtils {
                     currentDate,
                 });
             }
-            const ncvt = 'NoncurrentVersionTransitions';
-            const hasNoncurrentVersionTransitions = Array.isArray(rule[ncvt]) && rule[ncvt].length > 0;
-            if (hasNoncurrentVersionTransitions && this._supportedRules.includes('noncurrentVersionTransition')) {
-                store.NoncurrentVersionTransition = this.getApplicableNCVTransition({
-                    transitions: rule.NoncurrentVersionTransitions,
-                    lastModified: metadata.LastModified,
-                    store,
-                    currentDate,
-                });
-            }
+            // TODO: Add support for NoncurrentVersionTransitions.
             return store;
         }, {});
         // Do not transition to a location where the object is already stored.
@@ -336,12 +249,6 @@ export default class LifecycleUtils {
             && applicableRules.Transition.StorageClass === metadata.StorageClass) {
             applicableRules.Transition = undefined;
         }
-        if (applicableRules.NoncurrentVersionTransition
-            && applicableRules.NoncurrentVersionTransition.StorageClass === metadata.StorageClass) {
-            applicableRules.NoncurrentVersionTransition = undefined;
-        }
         return applicableRules;
         /* eslint-enable no-param-reassign */
     }

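The removed getApplicableNCVTransition follows the same shape as the current-version logic kept above: reduce the rule's transitions to the most recent one whose time is already in the past, then arbitrate against any transition stored from a previous rule. A condensed sketch of that reduce, using plain millisecond timestamps instead of the class helpers:

    // Sketch of the reduce/compare pattern of the removed method.
    type NCVTransition = { NoncurrentDays: number; StorageClass: string };
    const msInOneDay = 24 * 60 * 60 * 1000;
    const transitionTime = (t: NCVTransition, lastModified: number) =>
        lastModified + t.NoncurrentDays * msInOneDay;

    const mostRecentApplicable = (
        transitions: NCVTransition[], lastModified: number, now: number,
    ) => transitions.reduce<NCVTransition | undefined>((result, transition) => {
        if (now < transitionTime(transition, lastModified)) {
            return result; // not applicable yet
        }
        if (!result) {
            return transition;
        }
        // keep whichever applicable transition has the most recent time
        return transitionTime(transition, lastModified) >
            transitionTime(result, lastModified) ? transition : result;
    }, undefined);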

@@ -1,110 +0,0 @@
-import {parseStringPromise} from 'xml2js';
-import errors, {ArsenalError} from '../errors';
-import * as werelogs from 'werelogs';
-import {validRestoreObjectTiers} from "../constants";
-
-/*
-    Format of xml request:
-    <RestoreRequest>
-        <Days>integer</Days>
-        <Tier>Standard|Bulk|Expedited</Tier>
-    </RestoreRequest>
-*/
-
-/**
- * validate restore request xml
- * @param restoreRequest - parsed restore request object
- * @return{ArsenalError|undefined} - error on failure, undefined on success
- */
-function validateRestoreRequest(restoreRequest?: any) {
-    if (!restoreRequest) {
-        const desc = 'request xml does not contain RestoreRequest';
-        return errors.MalformedXML.customizeDescription(desc);
-    }
-    if (!restoreRequest.Days || !restoreRequest.Days[0]) {
-        const desc = 'request xml does not contain RestoreRequest.Days';
-        return errors.MalformedXML.customizeDescription(desc);
-    }
-    // RestoreRequest.Days must be greater than or equal to 1
-    const daysValue = Number.parseInt(restoreRequest.Days[0], 10);
-    if (Number.isNaN(daysValue)) {
-        const desc = `RestoreRequest.Days is invalid type. [${restoreRequest.Days[0]}]`;
-        return errors.MalformedXML.customizeDescription(desc);
-    }
-    if (daysValue < 1) {
-        const desc = `RestoreRequest.Days must be greater than 0. [${restoreRequest.Days[0]}]`;
-        return errors.MalformedXML.customizeDescription(desc);
-    }
-    if (daysValue > 2147483647) {
-        const desc = `RestoreRequest.Days must be less than 2147483648. [${restoreRequest.Days[0]}]`;
-        return errors.MalformedXML.customizeDescription(desc);
-    }
-    if (restoreRequest.Tier && restoreRequest.Tier[0] && !validRestoreObjectTiers.has(restoreRequest.Tier[0])) {
-        const desc = `RestoreRequest.Tier is invalid value. [${restoreRequest.Tier[0]}]`;
-        return errors.MalformedXML.customizeDescription(desc);
-    }
-    return undefined;
-}
-
-/**
- * parseRestoreRequestXml - Parse and validate xml body, returning callback with
- * object restoreReqObj: { days: <value>, tier: <value> }
- * @param xml - xml body to parse and validate
- * @param log - Werelogs logger
- * @param cb - callback to server
- * @return - calls callback with object restore request or error
- */
-export async function parseRestoreRequestXml(
-    xml: string,
-    log: werelogs.Logger,
-    cb: (err: ArsenalError | null, data?: any) => void,
-) {
-    let result;
-    try {
-        result = await parseStringPromise(xml);
-    } catch (err) {
-        log.debug('xml parsing failed', {
-            error: err,
-            method: 'parseRestoreXml',
-            xml,
-        });
-        return cb(errors.MalformedXML);
-    }
-    if (!result) {
-        const desc = 'request xml is undefined or empty';
-        return cb(errors.MalformedXML.customizeDescription(desc));
-    }
-    const restoreRequest = result.RestoreRequest;
-    const restoreReqError = validateRestoreRequest(restoreRequest);
-    if (restoreReqError) {
-        log.debug('restore request validation failed', {
-            error: restoreReqError,
-            method: 'validateRestoreRequest',
-            xml,
-        });
-        return cb(restoreReqError);
-    }
-    // If do not specify Tier, set "Standard"
-    return cb(null, {
-        days: Number.parseInt(restoreRequest.Days, 10),
-        tier: restoreRequest.Tier && restoreRequest.Tier[0] ? restoreRequest.Tier[0] : 'Standard',
-    });
-}
-
-/**
- * convertToXml - Convert restore request info object to xml
- * @param days - restore days
- * @param tier - restore tier
- * @return - returns restore request information xml string
- */
-export function convertToXml(days: string, tier: string) {
-    if (!(days && tier)) {
-        return '';
-    }
-    return [
-        '<RestoreRequest xmlns="http://s3.amazonaws.com/doc/2006-03-01/">',
-        `<Days>${days}</Days>`,
-        `<Tier>${tier}</Tier>`,
-        '</RestoreRequest>',
-    ].join('');
-}

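For reference, the two exports of this deleted module were inverses of each other: convertToXml renders days and tier into the RestoreRequest document, and parseRestoreRequestXml parses and validates it back, defaulting Tier to 'Standard'. A round trip, assuming the module were still importable (the import path below is hypothetical):

    import * as werelogs from 'werelogs';
    // Hypothetical import path for the deleted module:
    import { convertToXml, parseRestoreRequestXml } from './objectRestore';

    const log = new werelogs.Logger('restore-roundtrip');
    const xml = convertToXml('2', 'Standard');
    // '<RestoreRequest xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
    //      <Days>2</Days><Tier>Standard</Tier></RestoreRequest>'
    parseRestoreRequestXml(xml, log, (err, restoreInfo) => {
        // err === null, restoreInfo === { days: 2, tier: 'Standard' }
    });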

@@ -1,21 +1,5 @@
-const msInOneDay = 24 * 60 * 60 * 1000; // Milliseconds in a day.
-
-export const getMD5Buffer = (base64MD5: WithImplicitCoercion<string> | Uint8Array) =>
-    base64MD5 instanceof Uint8Array ? base64MD5 : Buffer.from(base64MD5, 'base64')
-
-export const getHexMD5 = (base64MD5: WithImplicitCoercion<string> | Uint8Array) =>
-    getMD5Buffer(base64MD5).toString('hex');
+export const getHexMD5 = (base64MD5: WithImplicitCoercion<string>) =>
+    Buffer.from(base64MD5, 'base64').toString('hex');
 
 export const getBase64MD5 = (hexMD5: WithImplicitCoercion<string>) =>
     Buffer.from(hexMD5, 'hex').toString('base64');
-
-/**
- * Calculates the number of scaled milliseconds per day based on the given time progression factor.
- * This function is intended for testing and simulation purposes only.
- * @param {number} timeProgressionFactor - The desired time progression factor for scaling.
- * @returns {number} The number of scaled milliseconds per day.
- * If the result is 0, the minimum value of 1 millisecond is returned.
- */
-export const scaleMsPerDay = (timeProgressionFactor: number): number =>
-    Math.round(msInOneDay / (timeProgressionFactor || 1)) || 1;

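The surviving helpers convert between the two MD5 encodings S3 traffic uses (hex for ETags, base64 for Content-MD5 headers), and the conversion is lossless in both directions. For example:

    const getHexMD5 = (base64MD5: string) =>
        Buffer.from(base64MD5, 'base64').toString('hex');
    const getBase64MD5 = (hexMD5: string) =>
        Buffer.from(hexMD5, 'hex').toString('base64');

    const hex = '9bb58f26192e4ba00f01e2e7b136bbd8';
    const b64 = getBase64MD5(hex);       // 'm7WPJhkuS6APAeLnsTa72A=='
    console.log(getHexMD5(b64) === hex); // true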

@@ -3,11 +3,6 @@ import * as werelogs from 'werelogs';
 import errors, { ArsenalError } from '../errors';
 import escapeForXml from './escapeForXml';
 
-export interface BucketTag {
-    Key: string;
-    Value: string;
-};
-
 const errorInvalidArgument = () => errors.InvalidArgument
     .customizeDescription('The header \'x-amz-tagging\' shall be ' +
     'encoded as UTF-8 then URLEncoded URL query parameters without ' +
@@ -37,15 +32,6 @@ export const _validator = {
         && tag.Key[0] !== undefined && tag.Value[0] !== undefined
         && typeof tag.Key[0] === 'string' && typeof tag.Value[0] === 'string',
 
-    // Allowed characters are letters, whitespace, and numbers, plus
-    // the following special characters: + - = . _ : /
-    // Maximum key length: 128 Unicode characters
-    // Maximum value length: 256 Unicode characters
-    validateTagObjectStructure: (tag: BucketTag) => tag
-        && Object.keys(tag).length === 2
-        && typeof tag.Key === 'string' && typeof tag.Value === 'string'
-        && tag.Key.length >= 1 && tag.Value.length >= 1,
-
     validateXMLStructure: (result: any) =>
         result && Object.keys(result).length === 1 &&
         result.Tagging &&
@@ -114,47 +100,12 @@ function _validateTags(tags: Array<{ Key: string[], Value: string[] }>) {
     }
     // not repeating keys
     if (tags.length > Object.keys(tagsResult).length) {
-        return errors.InvalidTag.customizeDescription(
-            'Cannot provide multiple Tags with the same key'
-        );
+        return errors.InvalidTag.customizeDescription('Cannot provide ' +
+            'multiple Tags with the same key');
     }
     return tagsResult;
 }
 
-/** areTagsValid - Validate bucket tags
- * @param tags - tags parsed from xml to be validated
- * @return result - true if the tags are valide, false otherwise
- */
-export function areTagsValid(tags: Array<BucketTag>) {
-    if (tags.length === 0) {
-        return true;
-    }
-    // Maximum number of tags per resource: 50
-    if (tags.length > 50) {
-        return false;
-    }
-    const tagsResult = {};
-    for (const tag of tags) {
-        if (!_validator.validateTagObjectStructure(tag)) {
-            return false;
-        }
-        const { Key: key, Value: value } = tag;
-        const result = _validator.validateKeyValue(key, value);
-        if (result instanceof Error) {
-            return false;
-        }
-        tagsResult[key] = value;
-    }
-    // not repeating keys
-    if (tags.length > Object.keys(tagsResult).length) {
-        return false;
-    }
-    return true;
-}
-
 /** parseTagXml - Parse and validate xml body, returning callback with object
  * tags : { key: value}
  * @param xml - xml body to parse and validate

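The removed areTagsValid mirrors _validateTags but returns a boolean rather than an error, enforcing the same AWS limits: at most 50 tags, well-formed { Key, Value } pairs, and no repeated keys. Expected behaviour, assuming the removed export were still available:

    // Hypothetical usage of the removed export:
    areTagsValid([{ Key: 'env', Value: 'prod' }]);              // true
    areTagsValid([
        { Key: 'env', Value: 'a' },
        { Key: 'env', Value: 'b' },
    ]);                                                         // false: repeated key
    areTagsValid(new Array(51).fill({ Key: 'k', Value: 'v' })); // false: > 50 tags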

@@ -77,34 +77,6 @@ export function _checkUnmodifiedSince(
     return { present: false, error: null };
 }
 
-/**
- * checks 'if-modified-since' and 'if-unmodified-since' headers if included in
- * request against last-modified date of object
- * @param headers - headers from request object
- * @param lastModified - last modified date of object
- * @return contains modifiedSince and unmodifiedSince res objects
- */
-export function checkDateModifiedHeaders(
-    headers: http.IncomingHttpHeaders,
-    lastModified: string,
-) {
-    const lastModifiedDate = new Date(lastModified);
-    lastModifiedDate.setMilliseconds(0);
-    const millis = lastModifiedDate.getTime();
-
-    const ifModifiedSinceHeader = headers['if-modified-since'] ||
-        headers['x-amz-copy-source-if-modified-since'];
-    const ifUnmodifiedSinceHeader = headers['if-unmodified-since'] ||
-        headers['x-amz-copy-source-if-unmodified-since'];
-
-    const modifiedSinceRes = _checkModifiedSince(ifModifiedSinceHeader?.toString(),
-        millis);
-    const unmodifiedSinceRes = _checkUnmodifiedSince(ifUnmodifiedSinceHeader?.toString(),
-        millis);
-
-    return { modifiedSinceRes, unmodifiedSinceRes };
-}
-
 /**
  * validateConditionalHeaders - validates 'if-modified-since',
  * 'if-unmodified-since', 'if-match' or 'if-none-match' headers if included in
@@ -120,14 +92,21 @@ export function validateConditionalHeaders(
     lastModified: string,
     contentMD5: string,
 ): {} | { present: boolean; error: ArsenalError } {
+    const lastModifiedDate = new Date(lastModified);
+    lastModifiedDate.setMilliseconds(0);
+    const millis = lastModifiedDate.getTime();
     const ifMatchHeader = headers['if-match'] ||
         headers['x-amz-copy-source-if-match'];
     const ifNoneMatchHeader = headers['if-none-match'] ||
         headers['x-amz-copy-source-if-none-match'];
+    const ifModifiedSinceHeader = headers['if-modified-since'] ||
+        headers['x-amz-copy-source-if-modified-since'];
+    const ifUnmodifiedSinceHeader = headers['if-unmodified-since'] ||
+        headers['x-amz-copy-source-if-unmodified-since'];
     const etagMatchRes = _checkEtagMatch(ifMatchHeader?.toString(), contentMD5);
     const etagNoneMatchRes = _checkEtagNoneMatch(ifNoneMatchHeader?.toString(), contentMD5);
-    const { modifiedSinceRes, unmodifiedSinceRes } =
-        checkDateModifiedHeaders(headers, lastModified);
+    const modifiedSinceRes = _checkModifiedSince(ifModifiedSinceHeader?.toString(), millis);
+    const unmodifiedSinceRes = _checkUnmodifiedSince(ifUnmodifiedSinceHeader?.toString(), millis);
     // If-Unmodified-Since condition evaluates to false and If-Match
     // is not present, then return the error. Otherwise, If-Unmodified-Since is
     // silent when If-Match match, and when If-Match does not match, it's the

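The removed checkDateModifiedHeaders bundled the two date-condition checks so callers outside validateConditionalHeaders could reuse them; the head branch inlines the same logic instead. Its result shape, assuming the removed export:

    // Hypothetical usage of the removed export:
    const { modifiedSinceRes, unmodifiedSinceRes } = checkDateModifiedHeaders(
        { 'if-modified-since': 'Mon, 01 Jul 2024 10:00:00 GMT' },
        'Tue, 02 Jul 2024 10:00:00 GMT',
    );
    // Each result has the shape { present: boolean, error: ArsenalError | null },
    // matching the `{ present: false, error: null }` default visible above.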

@@ -13,7 +13,7 @@ import * as routesUtils from './routesUtils';
 import routeWebsite from './routes/routeWebsite';
 import * as http from 'http';
 import StatsClient from '../metrics/StatsClient';
-import { objectKeyByteLimit } from '../constants';
 import * as requestUtils from '../../lib/policyEvaluator/requestUtils';
 
 const routeMap = {
@@ -67,14 +67,8 @@ function checkBucketAndKey(
                 blacklistedPrefixes.object);
             if (!result.isValid) {
                 log.debug('invalid object key', { objectKey });
-                if (result.invalidPrefix) {
-                    return errors.InvalidArgument.customizeDescription('Invalid ' +
-                        'prefix - object key cannot start with ' +
-                        `"${result.invalidPrefix}".`);
-                }
-                return errors.KeyTooLong.customizeDescription('Object key is too ' +
-                    'long. Maximum number of bytes allowed in keys is ' +
-                    `${objectKeyByteLimit}.`);
+                return errors.InvalidArgument.customizeDescription('Object key ' +
+                    `must not start with "${result.invalidPrefix}".`);
             }
         }
         if ((reqQuery.partNumber || reqQuery.uploadId)
@@ -156,6 +150,7 @@ export type Params = {
     };
     unsupportedQueries: any;
     api: { callApiMethod: routesUtils.CallApiMethod };
+    oTel?: any,
 }
 
 /** routes - route request to appropriate method
@@ -186,6 +181,22 @@ export default function routes(
     logger: RequestLogger,
     s3config?: any,
 ) {
+    const {
+        dataRetrievalParams: {
+            oTel: {
+                tracer,
+                activeSpan,
+                activeTracerContext,
+            }
+        }
+    } = params;
+    return tracer.startActiveSpan('Validating Request Parameters with Arsenal', undefined, activeTracerContext, arsenalValidatorSpan => {
+        arsenalValidatorSpan.setAttributes({
+            'code.lineno': 176,
+            'code.filename': 'lib/s3routes/routes.ts',
+            'code.function': 'routes()',
+            'code.url': 'https://github.com/scality/Arsenal/blob/development/7.70/lib/s3routes/routes.ts#L176'
+        });
     checkTypes(req, res, params, logger);
 
     const {
@@ -219,8 +230,7 @@ export default function routes(
         // @ts-ignore
         logger.newRequestLogger());
 
-    if (!req.url!.startsWith('/_/healthcheck') &&
-        !req.url!.startsWith('/_/report')) {
+    if (!req.url!.startsWith('/_/healthcheck')) {
         log.info('received request', clientInfo);
     }
 
@@ -233,6 +243,7 @@ export default function routes(
             internalServiceName = internalServiceName.slice(0, serviceDelim);
         }
         if (internalHandlers[internalServiceName] === undefined) {
+            activeSpan.recordException(errors.InvalidURI);
             return routesUtils.responseXMLBody(
                 errors.InvalidURI, null, res, log);
         }
@@ -249,6 +260,8 @@ export default function routes(
         const validHosts = allEndpoints.concat(websiteEndpoints);
         routesUtils.normalizeRequest(req, validHosts);
     } catch (err: any) {
+        activeSpan.recordException(errors.InvalidURI.customizeDescription('Could not parse the ' +
+            'specified URI. Check your restEndpoints configuration.'));
         log.debug('could not normalize request', { error: err.stack });
         return routesUtils.responseXMLBody(
             errors.InvalidURI.customizeDescription('Could not parse the ' +
@@ -266,11 +279,21 @@ export default function routes(
         // @ts-ignore
         bodyLength: parseInt(req.headers['content-length'], 10) || 0,
     });
+    activeSpan.setAttributes({
+        // @ts-ignore
+        'aws.s3.bucket': req.bucketName,
+        // @ts-ignore
+        'aws.s3.key': req.objectKey,
+        // @ts-ignore
+        'aws.request_id': log.getUids().join(':'),
+        'rpc.service': 'S3',
+    });
 
     // @ts-ignore
     const { error, method } = checkUnsupportedRoutes(req.method, req.query);
 
     if (error) {
+        activeSpan.recordException(error);
         log.trace('error validating route or uri params', { error });
         // @ts-ignore
         return routesUtils.responseXMLBody(error, '', res, log);
@@ -282,6 +305,7 @@ export default function routes(
         req.method, req.query, blacklistedPrefixes, log);
 
     if (bucketOrKeyError) {
+        activeSpan.recordException(bucketOrKeyError)
        log.trace('error with bucket or key value',
            { error: bucketOrKeyError });
         return routesUtils.responseXMLBody(bucketOrKeyError, null, res, log);
@@ -292,6 +316,7 @@ export default function routes(
     if (websiteEndpoints && websiteEndpoints.indexOf(req.parsedHost) > -1) {
         return routeWebsite(req, res, api, log, statsClient, dataRetrievalParams);
     }
+    arsenalValidatorSpan.end();
 
     return method(req, res, api, log, statsClient, dataRetrievalParams);
+    });
 }

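The wrapper added on the head branch relies on the four-argument overload of startActiveSpan from @opentelemetry/api (name, options, context, callback); the callback receives the new span, and the caller is responsible for ending it on every path. A minimal sketch of the pattern outside of Arsenal:

    import { trace, context, Span } from '@opentelemetry/api';

    const tracer = trace.getTracer('cloudserver');
    const activeTracerContext = context.active();

    tracer.startActiveSpan('Validating Request Parameters with Arsenal',
        undefined, activeTracerContext, (span: Span) => {
            try {
                span.setAttributes({ 'rpc.service': 'S3' });
                // ... validate and route the request ...
            } finally {
                span.end(); // every exit path must end the span
            }
        });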

@@ -4,6 +4,7 @@ import * as routesUtils from '../routesUtils';
 import errors from '../../errors';
 import StatsClient from '../../metrics/StatsClient';
 import * as http from 'http';
+import { actionMonitoringMapS3 } from '../../policyEvaluator/utils/actionMaps';
 
 export default function routeDELETE(
     request: http.IncomingMessage,
@@ -11,60 +12,146 @@ export default function routeDELETE(
     api: { callApiMethod: routesUtils.CallApiMethod },
     log: RequestLogger,
     statsClient?: StatsClient,
+    dataRetrievalParams?: any,
 ) {
+    const {
+        oTel: {
+            tracer,
+            activeSpan,
+            activeTracerContext,
+        },
+    } = dataRetrievalParams;
+    return tracer.startActiveSpan('Arsenal:: Performing Delete API related operations using Cloudserver, Vault and Metadata', undefined, activeTracerContext, cloudserverApiSpan => {
+        activeSpan.addEvent('Request validated, routing request using routeDELETE() in arsenal');
+        cloudserverApiSpan.setAttributes({
+            'code.lineno': 8,
+            'code.filename': 'lib/s3routes/routes/routeDELETE.ts',
+            'code.function': 'routeDELETE()',
+        })
+        activeSpan.addEvent('Detecting which API to route to using arsenal routeDELETE()')
     const call = (name: string) => {
         return api.callApiMethod(name, request, response, log, (err, corsHeaders) => {
+            cloudserverApiSpan.end();
+            const action = actionMonitoringMapS3[name];
+            activeSpan.addEvent(`${action} API operation complete`);
+            if (err) {
+                activeSpan.recordException(err);
+            }
             routesUtils.statsReport500(err, statsClient);
+            activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
             return routesUtils.responseNoBody(err, corsHeaders, response, 204, log);
-        });
+        }, {
+            cloudserverApiSpan,
+            activeSpan,
+            activeTracerContext,
+            tracer,
+        });
     }
     log.debug('routing request', { method: 'routeDELETE' });
     const { query, objectKey } = request as any
     if (query?.uploadId) {
+        // @ts-ignore
+        activeSpan.updateName('S3 API request');
+        activeSpan.addEvent('Detected AbortMultipartUpload API request');
+        activeSpan.setAttribute('rpc.method', 'AbortMultipartUpload');
         if (objectKey === undefined) {
             const message = 'A key must be specified';
             const err = errors.InvalidRequest.customizeDescription(message);
+            activeSpan.recordException(err);
+            cloudserverApiSpan.end();
             return routesUtils.responseNoBody(err, null, response, 200, log);
         }
         return call('multipartDelete');
     } else if (objectKey === undefined) {
         if (query?.website !== undefined) {
+            // @ts-ignore
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected DeleteBucketWebsite API request');
+            activeSpan.setAttribute('rpc.method', 'DeleteBucketWebsite');
             return call('bucketDeleteWebsite');
         } else if (query?.cors !== undefined) {
+            // @ts-ignore
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected DeleteBucketCors API request');
+            activeSpan.setAttribute('rpc.method', 'DeleteBucketCors');
            return call('bucketDeleteCors');
         } else if (query?.replication !== undefined) {
+            // @ts-ignore
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected DeleteBucketReplication API request');
+            activeSpan.setAttribute('rpc.method', 'DeleteBucketReplication');
             return call('bucketDeleteReplication');
         } else if (query?.lifecycle !== undefined) {
+            // @ts-ignore
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected DeleteBucketLifecycle API request');
+            activeSpan.setAttribute('rpc.method', 'DeleteBucketLifecycle');
             return call('bucketDeleteLifecycle');
         } else if (query?.policy !== undefined) {
+            // @ts-ignore
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected DeleteBucketPolicy API request');
+            activeSpan.setAttribute('rpc.method', 'DeleteBucketPolicy');
             return call('bucketDeletePolicy');
         } else if (query?.encryption !== undefined) {
+            // @ts-ignore
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected DeleteBucketEncryption API request');
+            activeSpan.setAttribute('rpc.method', 'DeleteBucketEncryption');
             return call('bucketDeleteEncryption');
         } else if (query?.tagging !== undefined) {
+            // @ts-ignore
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected DeleteBucketTagging API request');
+            activeSpan.setAttribute('rpc.method', 'DeleteBucketTagging');
             return call('bucketDeleteTagging');
-        } else if (query?.quota !== undefined) {
-            return call('bucketDeleteQuota');
         }
+        // @ts-ignore
+        activeSpan.updateName('S3 API request');
+        activeSpan.addEvent('Detected DeleteBucket API request');
+        activeSpan.setAttribute('rpc.method', 'DeleteBucket');
         call('bucketDelete');
     } else {
         if (query?.tagging !== undefined) {
+            // @ts-ignore
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected DeleteObjectTagging API request');
+            activeSpan.setAttribute('rpc.method', 'DeleteObjectTagging');
             return call('objectDeleteTagging');
         }
-        api.callApiMethod('objectDelete', request, response, log,
+        // @ts-ignore
+        activeSpan.updateName('S3 API request');
+        activeSpan.addEvent('Detected DeleteObject API request');
+        activeSpan.setAttribute('rpc.method', 'DeleteObject');
+        return api.callApiMethod('objectDelete', request, response, log,
             (err, corsHeaders) => {
+                cloudserverApiSpan.end();
+                activeSpan.addEvent('DeleteObject API operation complete')
                 /*
                 * Since AWS expects a 204 regardless of the existence of
                 the object, the errors NoSuchKey and NoSuchVersion should not
                * be sent back as a response.
                */
                 if (err && !err.is.NoSuchKey && !err.is.NoSuchVersion) {
+                    activeSpan.recordException(err);
+                    cloudserverApiSpan.end();
                     return routesUtils.responseNoBody(err, corsHeaders,
                         response, undefined, log);
                 }
+                if (err?.code === 500) {
+                    activeSpan.recordException(err);
+                }
                 routesUtils.statsReport500(err, statsClient);
+                activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                 return routesUtils.responseNoBody(null, corsHeaders, response,
                     204, log);
-            });
+            }, {
+                cloudserverApiSpan,
+                activeSpan,
+                activeTracerContext,
+                tracer,
+            });
     }
+    });
 }

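The `call` helper above resolves the user-facing operation name through actionMonitoringMapS3 so span events read like AWS API names rather than internal method names. A sketch of that lookup, with a few mappings assumed from the event strings in this diff:

    // Mappings assumed for illustration from the events in this diff:
    const actionMonitoringMapS3: Record<string, string> = {
        multipartDelete: 'AbortMultipartUpload',
        bucketDeleteWebsite: 'DeleteBucketWebsite',
        objectDeleteTagging: 'DeleteObjectTagging',
    };
    const name = 'multipartDelete';
    console.log(`${actionMonitoringMapS3[name]} API operation complete`);
    // -> 'AbortMultipartUpload API operation complete'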

@@ -4,6 +4,7 @@ import * as routesUtils from '../routesUtils';
 import errors from '../../errors';
 import * as http from 'http';
 import StatsClient from '../../metrics/StatsClient';
+import { actionMonitoringMapS3 } from '../../policyEvaluator/utils/actionMaps';
 
 export default function routerGET(
     request: http.IncomingMessage,
@@ -13,21 +14,50 @@ export default function routerGET(
     statsClient?: StatsClient,
     dataRetrievalParams?: any,
 ) {
+    const {
+        oTel: {
+            tracer,
+            activeSpan,
+            activeTracerContext,
+        }
+    } = dataRetrievalParams;
+    return tracer.startActiveSpan('Arsenal:: Performing Get API related operations using Cloudserver, Vault and Metadata', undefined, activeTracerContext, cloudserverApiSpan => {
+        activeSpan.addEvent('Request validated, routing request using routeGET() in arsenal');
+        activeSpan.addEvent('Detecting which API to route to using arsenal routeGET()')
     log.debug('routing request', { method: 'routerGET' });
 
     const { bucketName, objectKey, query } = request as any
 
     const call = (name: string) => {
-        api.callApiMethod(name, request, response, log, (err, xml, corsHeaders) => {
+        const action = actionMonitoringMapS3[name];
+        // @ts-ignore
+        activeSpan.updateName(`S3 API request`);
+        activeSpan.addEvent(`Detected ${action} API request`);
+        activeSpan.setAttribute('rpc.method', action);
+        return api.callApiMethod(name, request, response, log, (err, xml, corsHeaders) => {
+            cloudserverApiSpan.end();
+            activeSpan.addEvent(`${action} API operation complete`);
+            if (err) {
+                activeSpan.recordException(err);
+            }
             routesUtils.statsReport500(err, statsClient);
+            activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
             return routesUtils.responseXMLBody(err, xml, response, log, corsHeaders);
+        }, {
+            cloudserverApiSpan,
+            activeSpan,
+            activeTracerContext,
+            tracer,
         });
     }
     if (bucketName === undefined && objectKey !== undefined) {
-        routesUtils.responseXMLBody(errors.NoSuchBucket, null, response, log);
+        activeSpan.recordException(errors.NoSuchBucket);
+        cloudserverApiSpan.end();
+        return routesUtils.responseXMLBody(errors.NoSuchBucket, null, response, log);
     } else if (bucketName === undefined && objectKey === undefined) {
         // GET service
         call('serviceGet');
     } else if (objectKey === undefined) {
         // GET bucket ACL
@@ -58,10 +88,6 @@ export default function routerGET(
             call('bucketGetNotification');
         } else if (query.encryption !== undefined) {
             call('bucketGetEncryption');
-        } else if (query.search !== undefined) {
-            call('metadataSearch')
-        } else if (query.quota !== undefined) {
-            call('bucketGetQuota');
         } else {
             // GET bucket
             call('bucketGet');
@@ -81,8 +107,14 @@ export default function routerGET(
             call('objectGetRetention');
         } else {
             // GET object
-            api.callApiMethod('objectGet', request, response, log,
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected GetObject API request');
+            activeSpan.setAttribute('aws.request_id', log.getUids()[0]);
+            activeSpan.setAttribute('rpc.method', 'GetObject');
+            return api.callApiMethod('objectGet', request, response, log,
                 (err, dataGetInfo, resMetaHeaders, range) => {
+                    cloudserverApiSpan.end();
+                    activeSpan.addEvent('Located Data, using arsenal to make GET request')
                     let contentLength = 0;
                     if (resMetaHeaders && resMetaHeaders['Content-Length']) {
                         contentLength = resMetaHeaders['Content-Length'];
@@ -90,11 +122,20 @@ export default function routerGET(
                     // TODO ARSN-216 Fix logger
                     // @ts-ignore
                     log.end().addDefaultFields({ contentLength });
+                    if (err?.code === 500) {
+                        activeSpan.recordException(err);
+                    }
                     routesUtils.statsReport500(err, statsClient);
                     return routesUtils.responseStreamData(err, query,
                         resMetaHeaders, dataGetInfo, dataRetrievalParams, response,
                         range, log);
+                }, {
+                    cloudserverApiSpan,
+                    activeSpan,
+                    activeTracerContext,
+                    tracer,
                 });
         }
     }
+    });
 }


@@ -11,28 +11,75 @@ export default function routeHEAD(
     api: { callApiMethod: routesUtils.CallApiMethod },
     log: RequestLogger,
     statsClient?: StatsClient,
+    dataRetrievalParams?: any,
 ) {
+    const{
+        oTel: {
+            tracer,
+            activeSpan,
+            activeTracerContext,
+        }
+    } = dataRetrievalParams;
+    return tracer.startActiveSpan('Arsenal:: Performing Head API related operations using Cloudserver, Vault and Metadata', undefined, activeTracerContext, cloudserverApiSpan => {
+        activeSpan.addEvent('Request validated, routing request using routeHEAD() in arsenal');
+        cloudserverApiSpan.setAttributes({
+            'code.lineno': 8,
+            'code.filename': 'lib/s3routes/routes/routeHEAD.ts',
+            'code.function': 'routeHEAD()',
+        });
+        activeSpan.addEvent('Detecting which API to route to using arsenal routeHEAD()');
     log.debug('routing request', { method: 'routeHEAD' });
     const { bucketName, objectKey } = request as any
     if (bucketName === undefined) {
         log.trace('head request without bucketName');
+        activeSpan.recordException(errors.MethodNotAllowed);
+        cloudserverApiSpan.end();
         routesUtils.responseXMLBody(errors.MethodNotAllowed,
             null, response, log);
     } else if (objectKey === undefined) {
+        activeSpan.updateName('S3 API request');
+        activeSpan.addEvent(`Detected HeadBucket API request`);
+        activeSpan.setAttribute('rpc.method', 'HeadBucket');
         // HEAD bucket
         api.callApiMethod('bucketHead', request, response, log,
             (err, corsHeaders) => {
+                activeSpan.addEvent('HeadBucket API operation complete')
+                cloudserverApiSpan.end();
+                if (err?.code === 500) {
+                    activeSpan.recordException(err);
+                }
                 routesUtils.statsReport500(err, statsClient);
+                activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                 return routesUtils.responseNoBody(err, corsHeaders, response,
                     200, log);
-            });
+            }, {
+                cloudserverApiSpan,
+                activeSpan,
+                activeTracerContext,
+                tracer,
+            });
     } else {
         // HEAD object
+        activeSpan.updateName('S3 API request');
+        activeSpan.addEvent(`Detected HeadObject API request`);
+        activeSpan.setAttribute('rpc.method', 'HeadObject');
         api.callApiMethod('objectHead', request, response, log,
             (err, resHeaders) => {
+                activeSpan.addEvent('HeadObject API operation complete')
+                cloudserverApiSpan.end();
+                if (err?.code === 500) {
+                    activeSpan.recordException(err);
+                }
                 routesUtils.statsReport500(err, statsClient);
+                activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                 return routesUtils.responseContentHeaders(err, {}, resHeaders,
                     response, log);
-            });
+            }, {
+                cloudserverApiSpan,
+                activeSpan,
+                activeTracerContext,
+                tracer,
+            });
     }
+    });
 }


@@ -11,7 +11,23 @@ export default function routeOPTIONS(
     api: { callApiMethod: routesUtils.CallApiMethod },
     log: RequestLogger,
     statsClient?: StatsClient,
+    dataRetrievalParams?: any,
 ) {
+    const {
+        oTel: {
+            tracer,
+            activeSpan,
+            activeTracerContext,
+        }
+    } = dataRetrievalParams;
+    return tracer.startActiveSpan('Arsenal:: Performing corsPreflight API related operations using Cloudserver, Vault and Metadata', undefined, activeTracerContext, cloudserverApiSpan => {
+        activeSpan.addEvent('Request validated, routing request using routeOPTIONS() in arsenal');
+        cloudserverApiSpan.setAttributes({
+            'code.lineno': 8,
+            'code.filename': 'lib/s3routes/routes/routeOPTIONS.ts',
+            'code.function': 'routeOPTIONS()',
+        });
+        activeSpan.addEvent('Detecting which API to route to using arsenal routeOPTIONS()');
     log.debug('routing request', { method: 'routeOPTION' });
 
     const corsMethod = request.headers['access-control-request-method'] || null;
@@ -19,21 +35,40 @@ export default function routeOPTIONS(
     if (!request.headers.origin) {
         const msg = 'Insufficient information. Origin request header needed.';
         const err = errors.BadRequest.customizeDescription(msg);
+        activeSpan.recordException(err);
+        cloudserverApiSpan.end();
         log.debug('missing origin', { method: 'routeOPTIONS', error: err });
         return routesUtils.responseXMLBody(err, null, response, log);
     }
     if (['GET', 'PUT', 'HEAD', 'POST', 'DELETE'].indexOf(corsMethod ?? '') < 0) {
         const msg = `Invalid Access-Control-Request-Method: ${corsMethod}`;
         const err = errors.BadRequest.customizeDescription(msg);
+        activeSpan.recordException(err);
+        cloudserverApiSpan.end();
         log.debug('invalid Access-Control-Request-Method',
             { method: 'routeOPTIONS', error: err });
         return routesUtils.responseXMLBody(err, null, response, log);
     }
+    // @ts-ignore
+    activeSpan.updateName('S3 API request');
+    activeSpan.addEvent(`Detected corsPreflight API request`);
+    activeSpan.setAttribute('rpc.method', 'corsPreflight');
     return api.callApiMethod('corsPreflight', request, response, log,
         (err, resHeaders) => {
+            activeSpan.addEvent('corsPreflight API operation complete');
+            cloudserverApiSpan.end();
+            if (err) {
+                activeSpan.recordException(err);
+            }
             routesUtils.statsReport500(err, statsClient);
+            activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
             return routesUtils.responseNoBody(err, resHeaders, response, 200,
                 log);
-        });
+        }, {
+            cloudserverApiSpan,
+            activeSpan,
+            activeTracerContext,
+            tracer,
+        });
+    });
 }


@@ -3,6 +3,7 @@ import { RequestLogger } from 'werelogs';
 import * as routesUtils from '../routesUtils';
 import errors from '../../errors';
 import * as http from 'http';
+import StatsClient from '../../metrics/StatsClient';
 
 /* eslint-disable no-param-reassign */
 export default function routePOST(
@@ -10,7 +11,24 @@ export default function routePOST(
     response: http.ServerResponse,
     api: { callApiMethod: routesUtils.CallApiMethod },
     log: RequestLogger,
+    statsClient?: StatsClient,
+    dataRetrievalParams?: any,
 ) {
+    const {
+        oTel: {
+            tracer,
+            activeSpan,
+            activeTracerContext,
+        }
+    } = dataRetrievalParams;
+    return tracer.startActiveSpan('Arsenal:: Performing Post API related operations using Cloudserver, Vault and Metadata', undefined, activeTracerContext, cloudserverApiSpan => {
+        activeSpan.addEvent('Request validated, routing request using routePOST() in arsenal');
+        cloudserverApiSpan.setAttributes({
+            'code.lineno': 9,
+            'code.filename': 'lib/s3routes/routes/routePOST.ts',
+            'code.function': 'routePOST()',
+        });
+        activeSpan.addEvent('Detecting which API to route to using arsenal routePOST()');
     log.debug('routing request', { method: 'routePOST' });
 
     const { query, bucketName, objectKey } = request as any
@@ -18,6 +36,8 @@ export default function routePOST(
     const invalidMultiObjectDelReq = query.delete !== undefined
         && bucketName === undefined;
     if (invalidMultiObjectDelReq) {
+        activeSpan.recordException(errors.MethodNotAllowed);
+        cloudserverApiSpan.end();
         return routesUtils.responseNoBody(errors.MethodNotAllowed, null,
             response, undefined, log);
     }
@@ -30,42 +50,76 @@ export default function routePOST(
     const invalidCompleteMpuReq = query.uploadId !== undefined
         && objectKey === undefined;
     if (invalidInitiateMpuReq || invalidCompleteMpuReq) {
+        activeSpan.recordException(errors.InvalidURI);
+        cloudserverApiSpan.end();
         return routesUtils.responseNoBody(errors.InvalidURI, null,
             response, undefined, log);
     }
 
     // POST initiate multipart upload
     if (query.uploads !== undefined) {
+        activeSpan.updateName('S3 API request');
+        activeSpan.addEvent(`Detected CreateMultipartUpload API request`);
+        activeSpan.setAttribute('rpc.method', 'CreateMultipartUpload');
         return api.callApiMethod('initiateMultipartUpload', request,
-            response, log, (err, result, corsHeaders) =>
+            response, log, (err, result, corsHeaders) => {
+                cloudserverApiSpan.end();
+                activeSpan.addEvent('CreateMultipartUpload API operation complete');
+                activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                 routesUtils.responseXMLBody(err, result, response, log,
-                    corsHeaders));
+                    corsHeaders)
+            }, {
+                cloudserverApiSpan,
+                activeSpan,
+                activeTracerContext,
+                tracer,
+            });
     }
 
     // POST complete multipart upload
     if (query.uploadId !== undefined) {
+        activeSpan.updateName('S3 API request');
+        activeSpan.addEvent(`Detected CompleteMultipartUpload API request`);
+        activeSpan.setAttribute('rpc.method', 'CompleteMultipartUpload');
         return api.callApiMethod('completeMultipartUpload', request,
-            response, log, (err, result, resHeaders) =>
+            response, log, (err, result, resHeaders) => {
+                cloudserverApiSpan.end();
+                activeSpan.addEvent('CompleteMultipartUpload API operation complete');
+                activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                 routesUtils.responseXMLBody(err, result, response, log,
-                    resHeaders));
+                    resHeaders)
+            }, {
+                cloudserverApiSpan,
+                activeSpan,
+                activeTracerContext,
+                tracer,
+            });
     }
 
     // POST multiObjectDelete
     if (query.delete !== undefined) {
+        activeSpan.updateName('S3 API request');
+        activeSpan.addEvent(`Detected AbortMultipartUpload API request`);
+        activeSpan.setAttribute('rpc.method', 'AbortMultipartUpload');
         return api.callApiMethod('multiObjectDelete', request, response,
-            log, (err, xml, corsHeaders) =>
+            log, (err, xml, corsHeaders) => {
+                cloudserverApiSpan.end();
+                activeSpan.addEvent('AbortMultipartUpload API operation complete');
+                activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                 routesUtils.responseXMLBody(err, xml, response, log,
-                    corsHeaders));
+                    corsHeaders)
+            }, {
+                cloudserverApiSpan,
+                activeSpan,
+                activeTracerContext,
+                tracer,
+            });
     }
 
-    // POST Object restore
-    if (query.restore !== undefined) {
-        return api.callApiMethod('objectRestore', request, response,
-            log, (err, statusCode, resHeaders) =>
-                routesUtils.responseNoBody(err, resHeaders, response,
-                    statusCode, log));
-    }
-
+    activeSpan.recordException(errors.NotImplemented);
+    cloudserverApiSpan.end();
+    activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
     return routesUtils.responseNoBody(errors.NotImplemented, null, response,
         200, log);
+    });
 }

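routePOST above distinguishes its implemented operations purely by query-string flags, answering NotImplemented for anything else on the head branch (the base branch additionally handles `?restore`). The dispatch reduces to:

    const pickPostApi = (query: Record<string, string | undefined>) => {
        if (query.uploads !== undefined) return 'initiateMultipartUpload';
        if (query.uploadId !== undefined) return 'completeMultipartUpload';
        if (query.delete !== undefined) return 'multiObjectDelete';
        return null; // head branch: NotImplemented
    };
    console.log(pickPostApi({ uploads: '' })); // 'initiateMultipartUpload'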

@ -11,7 +11,23 @@ export default function routePUT(
api: { callApiMethod: routesUtils.CallApiMethod }, api: { callApiMethod: routesUtils.CallApiMethod },
log: RequestLogger, log: RequestLogger,
statsClient?: StatsClient, statsClient?: StatsClient,
dataRetrievalParams?: any,
) { ) {
const {
oTel: {
tracer,
activeSpan,
activeTracerContext,
},
} = dataRetrievalParams;
return tracer.startActiveSpan('Arsenal:: Performing Put API related operations using Cloudserver, Vault and Metadata', undefined, activeTracerContext, cloudserverApiSpan => {
activeSpan.addEvent('Request validated, routing request using routePUT() in arsenal')
cloudserverApiSpan.setAttributes({
'code.lineno': 8,
'code.filename': 'lib/s3routes/routes/routePUT.ts',
'code.function': 'routePUT()',
})
activeSpan.addEvent('Detecting which API to route to using arsenal routePUT()')
log.debug('routing request', { method: 'routePUT' }); log.debug('routing request', { method: 'routePUT' });
const { objectKey, query, bucketName, parsedContentLength } = request as any const { objectKey, query, bucketName, parsedContentLength } = request as any
@@ -29,99 +45,260 @@ export default function routePUT(
        }
        // PUT bucket ACL
        if (query.acl !== undefined) {
-            api.callApiMethod('bucketPutACL', request, response, log,
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected PutBucketAcl API request');
+            activeSpan.setAttribute('rpc.method', 'PutBucketAcl');
+            return api.callApiMethod('bucketPutACL', request, response, log,
                (err, corsHeaders) => {
+                    cloudserverApiSpan.end();
+                    activeSpan.addEvent('PutBucketAcl API operation complete');
+                    if (err?.code === 500) {
+                        activeSpan.recordException(err);
+                    }
                    routesUtils.statsReport500(err, statsClient);
+                    activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                    return routesUtils.responseNoBody(err, corsHeaders,
                        response, 200, log);
+                }, {
+                    cloudserverApiSpan,
+                    activeSpan,
+                    activeTracerContext,
+                    tracer,
                });
        } else if (query.versioning !== undefined) {
-            api.callApiMethod('bucketPutVersioning', request, response, log,
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected PutBucketVersioning API request');
+            activeSpan.setAttribute('rpc.method', 'PutBucketVersioning');
+            return api.callApiMethod('bucketPutVersioning', request, response, log,
                (err, corsHeaders) => {
+                    cloudserverApiSpan.end();
+                    activeSpan.addEvent('PutBucketVersioning API operation complete');
+                    if (err?.code === 500) {
+                        activeSpan.recordException(err);
+                    }
                    routesUtils.statsReport500(err, statsClient);
+                    activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                    routesUtils.responseNoBody(err, corsHeaders, response, 200,
                        log);
+                }, {
+                    cloudserverApiSpan,
+                    activeSpan,
+                    activeTracerContext,
+                    tracer,
                });
        } else if (query.website !== undefined) {
-            api.callApiMethod('bucketPutWebsite', request, response, log,
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected PutBucketWebsite API request');
+            activeSpan.setAttribute('rpc.method', 'PutBucketWebsite');
+            return api.callApiMethod('bucketPutWebsite', request, response, log,
                (err, corsHeaders) => {
+                    cloudserverApiSpan.end();
+                    activeSpan.addEvent('PutBucketWebsite API operation complete');
+                    if (err?.code === 500) {
+                        activeSpan.recordException(err);
+                    }
                    routesUtils.statsReport500(err, statsClient);
+                    activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                    return routesUtils.responseNoBody(err, corsHeaders,
                        response, 200, log);
+                }, {
+                    cloudserverApiSpan,
+                    activeSpan,
+                    activeTracerContext,
+                    tracer,
                });
        } else if (query.tagging !== undefined) {
-            api.callApiMethod('bucketPutTagging', request, response, log,
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected PutBucketTagging API request');
+            activeSpan.setAttribute('rpc.method', 'PutBucketTagging');
+            return api.callApiMethod('bucketPutTagging', request, response, log,
                (err, corsHeaders) => {
+                    cloudserverApiSpan.end();
+                    activeSpan.addEvent('PutBucketTagging API operation complete');
+                    if (err?.code === 500) {
+                        activeSpan.recordException(err);
+                    }
                    routesUtils.statsReport500(err, statsClient);
+                    activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                    return routesUtils.responseNoBody(err, corsHeaders,
                        response, 200, log);
+                }, {
+                    cloudserverApiSpan,
+                    activeSpan,
+                    activeTracerContext,
+                    tracer,
                });
        } else if (query.cors !== undefined) {
-            api.callApiMethod('bucketPutCors', request, response, log,
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected PutBucketCors API request');
+            activeSpan.setAttribute('rpc.method', 'PutBucketCors');
+            return api.callApiMethod('bucketPutCors', request, response, log,
                (err, corsHeaders) => {
+                    cloudserverApiSpan.end();
+                    activeSpan.addEvent('PutBucketCors API operation complete');
+                    if (err?.code === 500) {
+                        activeSpan.recordException(err);
+                    }
                    routesUtils.statsReport500(err, statsClient);
+                    activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                    return routesUtils.responseNoBody(err, corsHeaders,
                        response, 200, log);
+                }, {
+                    cloudserverApiSpan,
+                    activeSpan,
+                    activeTracerContext,
+                    tracer,
                });
        } else if (query.replication !== undefined) {
-            api.callApiMethod('bucketPutReplication', request, response, log,
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected PutBucketReplication API request');
+            activeSpan.setAttribute('rpc.method', 'PutBucketReplication');
+            return api.callApiMethod('bucketPutReplication', request, response, log,
                (err, corsHeaders) => {
+                    cloudserverApiSpan.end();
+                    activeSpan.addEvent('PutBucketReplication API operation complete');
+                    if (err?.code === 500) {
+                        activeSpan.recordException(err);
+                    }
                    routesUtils.statsReport500(err, statsClient);
+                    activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                    routesUtils.responseNoBody(err, corsHeaders, response, 200,
                        log);
+                }, {
+                    cloudserverApiSpan,
+                    activeSpan,
+                    activeTracerContext,
+                    tracer,
                });
        } else if (query.lifecycle !== undefined) {
-            api.callApiMethod('bucketPutLifecycle', request, response, log,
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected PutBucketLifecycle API request');
+            activeSpan.setAttribute('rpc.method', 'PutBucketLifecycle');
+            return api.callApiMethod('bucketPutLifecycle', request, response, log,
                (err, corsHeaders) => {
+                    cloudserverApiSpan.end();
+                    activeSpan.addEvent('PutBucketLifecycle API operation complete');
+                    if (err?.code === 500) {
+                        activeSpan.recordException(err);
+                    }
                    routesUtils.statsReport500(err, statsClient);
+                    activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                    routesUtils.responseNoBody(err, corsHeaders, response, 200,
                        log);
+                }, {
+                    cloudserverApiSpan,
+                    activeSpan,
+                    activeTracerContext,
+                    tracer,
                });
        } else if (query.policy !== undefined) {
-            api.callApiMethod('bucketPutPolicy', request, response, log,
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected PutBucketPolicy API request');
+            activeSpan.setAttribute('rpc.method', 'PutBucketPolicy');
+            return api.callApiMethod('bucketPutPolicy', request, response, log,
                (err, corsHeaders) => {
+                    cloudserverApiSpan.end();
+                    activeSpan.addEvent('PutBucketPolicy API operation complete');
+                    if (err?.code === 500) {
+                        activeSpan.recordException(err);
+                    }
                    routesUtils.statsReport500(err, statsClient);
+                    activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                    routesUtils.responseNoBody(err, corsHeaders, response, 200,
                        log);
+                }, {
+                    cloudserverApiSpan,
+                    activeSpan,
+                    activeTracerContext,
+                    tracer,
                });
        } else if (query['object-lock'] !== undefined) {
-            api.callApiMethod('bucketPutObjectLock', request, response, log,
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected PutObjectLockConfiguration API request');
+            activeSpan.setAttribute('rpc.method', 'PutObjectLockConfiguration');
+            return api.callApiMethod('bucketPutObjectLock', request, response, log,
                (err, corsHeaders) => {
+                    cloudserverApiSpan.end();
+                    activeSpan.addEvent('PutObjectLockConfiguration API operation complete');
+                    if (err?.code === 500) {
+                        activeSpan.recordException(err);
+                    }
                    routesUtils.statsReport500(err, statsClient);
+                    activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                    routesUtils.responseNoBody(err, corsHeaders, response, 200,
                        log);
+                }, {
+                    cloudserverApiSpan,
+                    activeSpan,
+                    activeTracerContext,
+                    tracer,
                });
        } else if (query.notification !== undefined) {
-            api.callApiMethod('bucketPutNotification', request, response, log,
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected PutBucketNotificationConfiguration API request');
+            activeSpan.setAttribute('rpc.method', 'PutBucketNotificationConfiguration');
+            return api.callApiMethod('bucketPutNotification', request, response, log,
                (err, corsHeaders) => {
+                    cloudserverApiSpan.end();
+                    activeSpan.addEvent('PutBucketNotificationConfiguration API operation complete');
+                    if (err?.code === 500) {
+                        activeSpan.recordException(err);
+                    }
                    routesUtils.statsReport500(err, statsClient);
+                    activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                    routesUtils.responseNoBody(err, corsHeaders, response, 200,
                        log);
+                }, {
+                    cloudserverApiSpan,
+                    activeSpan,
+                    activeTracerContext,
+                    tracer,
                });
        } else if (query.encryption !== undefined) {
-            api.callApiMethod('bucketPutEncryption', request, response, log,
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected PutBucketEncryption API request');
+            activeSpan.setAttribute('rpc.method', 'PutBucketEncryption');
+            return api.callApiMethod('bucketPutEncryption', request, response, log,
                (err, corsHeaders) => {
+                    cloudserverApiSpan.end();
+                    activeSpan.addEvent('PutBucketEncryption API operation complete');
+                    if (err?.code === 500) {
+                        activeSpan.recordException(err);
+                    }
                    routesUtils.statsReport500(err, statsClient);
+                    activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                    return routesUtils.responseNoBody(err, corsHeaders,
                        response, 200, log);
-                });
-        } else if (query.quota !== undefined) {
-            api.callApiMethod('bucketUpdateQuota', request, response,
-                log, (err, resHeaders) => {
-                    routesUtils.statsReport500(err, statsClient);
-                    return routesUtils.responseNoBody(err, resHeaders, response,
-                        200, log);
-                });
+                }, {
+                    cloudserverApiSpan,
+                    activeSpan,
+                    activeTracerContext,
+                    tracer,
+                });
        } else {
            // PUT bucket
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected PutBucket API request');
+            activeSpan.setAttribute('rpc.method', 'PutBucket');
            return api.callApiMethod('bucketPut', request, response, log,
                (err, corsHeaders) => {
+                    cloudserverApiSpan.end();
+                    activeSpan.addEvent('PutBucket API operation complete');
+                    if (err?.code === 500) {
+                        activeSpan.recordException(err);
+                    }
                    routesUtils.statsReport500(err, statsClient);
                    const location = { Location: `/${bucketName}` };
                    const resHeaders = corsHeaders ?
                        Object.assign({}, location, corsHeaders) : location;
+                    activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                    return routesUtils.responseNoBody(err, resHeaders,
                        response, 200, log);
+                }, {
+                    cloudserverApiSpan,
+                    activeSpan,
+                    activeTracerContext,
+                    tracer,
                });
        }
    } else {
@@ -131,6 +308,8 @@ export default function routePUT(
        // parse content-md5 from meta headers
        if (request.headers['content-md5'] === '') {
+            activeSpan.recordException(errors.InvalidDigest);
+            cloudserverApiSpan.end();
            log.debug('empty content-md5 header', {
                method: 'routePUT',
            });
@@ -150,6 +329,8 @@ export default function routePUT(
        request.contentMD5 = Buffer.from(request.contentMD5, 'base64').toString('hex');
        // @ts-ignore
        if (request.contentMD5 && request.contentMD5.length !== 32) {
+            activeSpan.recordException(errors.InvalidDigest);
+            cloudserverApiSpan.end();
            // @ts-ignore
            log.debug('invalid md5 digest', { contentMD5: request.contentMD5 });
            return routesUtils
@@ -159,15 +340,34 @@ export default function routePUT(
        }
        if (query.partNumber) {
            if (request.headers['x-amz-copy-source']) {
-                api.callApiMethod('objectPutCopyPart', request, response, log,
+                activeSpan.updateName('S3 API request');
+                activeSpan.addEvent('Detected UploadPartCopy API request');
+                activeSpan.setAttribute('rpc.method', 'UploadPartCopy');
+                return api.callApiMethod('objectPutCopyPart', request, response, log,
                    (err, xml, additionalHeaders) => {
+                        cloudserverApiSpan.end();
+                        activeSpan.addEvent('UploadPartCopy API operation complete');
+                        if (err) {
+                            activeSpan.recordException(err);
+                        }
                        routesUtils.statsReport500(err, statsClient);
+                        activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                        return routesUtils.responseXMLBody(err, xml, response, log,
                            additionalHeaders);
+                    }, {
+                        cloudserverApiSpan,
+                        activeSpan,
+                        activeTracerContext,
+                        tracer,
                    });
            } else {
-                api.callApiMethod('objectPutPart', request, response, log,
+                activeSpan.updateName('S3 API request');
+                activeSpan.addEvent('Detected UploadPart API request');
+                activeSpan.setAttribute('rpc.method', 'UploadPart');
+                return api.callApiMethod('objectPutPart', request, response, log,
                    (err, calculatedHash, corsHeaders) => {
+                        cloudserverApiSpan.end();
+                        activeSpan.addEvent('UploadPart API operation complete');
                        if (err) {
                            return routesUtils.responseNoBody(err, corsHeaders,
                                response, 200, log);
@@ -175,66 +375,164 @@ export default function routePUT(
                        // ETag's hex should always be enclosed in quotes
                        const resMetaHeaders = corsHeaders || {};
                        resMetaHeaders.ETag = `"${calculatedHash}"`;
+                        if (err) {
+                            activeSpan.recordException(err);
+                        }
                        routesUtils.statsReport500(err, statsClient);
+                        activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                        return routesUtils.responseNoBody(err, resMetaHeaders,
                            response, 200, log);
+                    }, {
+                        cloudserverApiSpan,
+                        activeSpan,
+                        activeTracerContext,
+                        tracer,
                    });
            }
        } else if (query.acl !== undefined) {
-            api.callApiMethod('objectPutACL', request, response, log,
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected PutObjectAcl API request');
+            activeSpan.setAttribute('rpc.method', 'PutObjectAcl');
+            return api.callApiMethod('objectPutACL', request, response, log,
                (err, resHeaders) => {
+                    cloudserverApiSpan.end();
+                    activeSpan.addEvent('PutObjectAcl API operation complete');
+                    if (err?.code === 500) {
+                        activeSpan.recordException(err);
+                    }
                    routesUtils.statsReport500(err, statsClient);
+                    activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                    return routesUtils.responseNoBody(err, resHeaders,
                        response, 200, log);
+                }, {
+                    cloudserverApiSpan,
+                    activeSpan,
+                    activeTracerContext,
+                    tracer,
                });
        } else if (query['legal-hold'] !== undefined) {
-            api.callApiMethod('objectPutLegalHold', request, response, log,
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected PutObjectLegalHold API request');
+            activeSpan.setAttribute('rpc.method', 'PutObjectLegalHold');
+            return api.callApiMethod('objectPutLegalHold', request, response, log,
                (err, resHeaders) => {
+                    cloudserverApiSpan.end();
+                    activeSpan.addEvent('PutObjectLegalHold API operation complete');
+                    if (err?.code === 500) {
+                        activeSpan.recordException(err);
+                    }
                    routesUtils.statsReport500(err, statsClient);
+                    activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                    return routesUtils.responseNoBody(err, resHeaders,
                        response, 200, log);
+                }, {
+                    cloudserverApiSpan,
+                    activeSpan,
+                    activeTracerContext,
+                    tracer,
                });
        } else if (query.tagging !== undefined) {
-            api.callApiMethod('objectPutTagging', request, response, log,
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected PutObjectTagging API request');
+            activeSpan.setAttribute('rpc.method', 'PutObjectTagging');
+            return api.callApiMethod('objectPutTagging', request, response, log,
                (err, resHeaders) => {
+                    cloudserverApiSpan.end();
+                    activeSpan.addEvent('PutObjectTagging API operation complete');
+                    if (err?.code === 500) {
+                        activeSpan.recordException(err);
+                    }
                    routesUtils.statsReport500(err, statsClient);
+                    activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                    return routesUtils.responseNoBody(err, resHeaders,
                        response, 200, log);
+                }, {
+                    cloudserverApiSpan,
+                    activeSpan,
+                    activeTracerContext,
+                    tracer,
                });
        } else if (query.retention !== undefined) {
-            api.callApiMethod('objectPutRetention', request, response, log,
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected PutObjectRetention API request');
+            activeSpan.setAttribute('rpc.method', 'PutObjectRetention');
+            return api.callApiMethod('objectPutRetention', request, response, log,
                (err, resHeaders) => {
+                    cloudserverApiSpan.end();
+                    activeSpan.addEvent('PutObjectRetention API operation complete');
+                    if (err?.code === 500) {
+                        activeSpan.recordException(err);
+                    }
                    routesUtils.statsReport500(err, statsClient);
+                    activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                    return routesUtils.responseNoBody(err, resHeaders,
                        response, 200, log);
+                }, {
+                    cloudserverApiSpan,
+                    activeSpan,
+                    activeTracerContext,
+                    tracer,
                });
        } else if (request.headers['x-amz-copy-source']) {
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected CopyObject API request');
+            activeSpan.setAttribute('rpc.method', 'CopyObject');
            return api.callApiMethod('objectCopy', request, response, log,
                (err, xml, additionalHeaders) => {
+                    cloudserverApiSpan.end();
+                    activeSpan.addEvent('CopyObject API operation complete');
+                    if (err?.code === 500) {
+                        activeSpan.recordException(err);
+                    }
                    routesUtils.statsReport500(err, statsClient);
+                    activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                    routesUtils.responseXMLBody(err, xml, response, log,
                        additionalHeaders);
+                }, {
+                    cloudserverApiSpan,
+                    activeSpan,
+                    activeTracerContext,
+                    tracer,
                });
        } else {
            if (request.headers['content-length'] === undefined &&
            request.headers['x-amz-decoded-content-length'] === undefined) {
+                activeSpan.recordException(errors.MissingContentLength);
+                cloudserverApiSpan.end();
                return routesUtils.responseNoBody(errors.MissingContentLength,
                    null, response, 411, log);
            }
            if (Number.isNaN(parsedContentLength) || parsedContentLength < 0) {
+                activeSpan.recordException(errors.BadRequest);
+                cloudserverApiSpan.end();
                return routesUtils.responseNoBody(errors.BadRequest,
                    null, response, 400, log);
            }
            // TODO ARSN-216 What's happening?
            // @ts-ignore
            log.end().addDefaultFields({ contentLength: request.parsedContentLength });
+            activeSpan.updateName('S3 API request');
+            activeSpan.addEvent('Detected PutObject API request');
+            activeSpan.setAttribute('rpc.method', 'PutObject');
            api.callApiMethod('objectPut', request, response, log,
                (err, resHeaders) => {
+                    cloudserverApiSpan.end();
+                    activeSpan.addEvent('PutObject API operation complete');
+                    if (err?.code === 500) {
+                        activeSpan.recordException(err);
+                    }
                    routesUtils.statsReport500(err, statsClient);
+                    activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                    return routesUtils.responseNoBody(err, resHeaders,
                        response, 200, log);
+                }, {
+                    cloudserverApiSpan,
+                    activeSpan,
+                    activeTracerContext,
+                    tracer,
                });
        }
    }
    return undefined;
+    });
}
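Every branch above repeats the same five steps: rename the active span, record the detected API, call the method, end the Cloudserver span, finalize the response. If the boilerplate is ever consolidated, a helper along these lines would express the pattern once. `instrumentedCall` is hypothetical and not part of this changeset:

    // Hypothetical helper: wraps one callApiMethod branch with the
    // instrumentation steps repeated throughout routePUT.
    function instrumentedCall(oTel: any, api: any, apiName: string,
        rpcMethod: string, request: any, response: any, log: any,
        onDone: (err: any, ...data: any[]) => void) {
        const { tracer, activeSpan, activeTracerContext, cloudserverApiSpan } = oTel;
        activeSpan.updateName('S3 API request');
        activeSpan.addEvent(`Detected ${rpcMethod} API request`);
        activeSpan.setAttribute('rpc.method', rpcMethod);
        return api.callApiMethod(apiName, request, response, log,
            (err: any, ...data: any[]) => {
                cloudserverApiSpan.end();
                activeSpan.addEvent(`${rpcMethod} API operation complete`);
                if (err?.code === 500) {
                    activeSpan.recordException(err);
                }
                onDone(err, ...data); // caller still owns response finalization
            }, { cloudserverApiSpan, activeSpan, activeTracerContext, tracer });
    }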

View File

@@ -11,29 +11,56 @@ export default function routerWebsite(
    api: { callApiMethod: routesUtils.CallApiMethod },
    log: RequestLogger,
    statsClient?: StatsClient,
-    dataRetrievalParams?: any,
+    dataRetrievalFn?: any,
) {
+    const {
+        oTel: {
+            tracer,
+            activeSpan,
+            activeTracerContext,
+        },
+    } = dataRetrievalFn;
+    return tracer.startActiveSpan('Arsenal:: Performing Website API related operations using Cloudserver, Vault and Metadata', undefined, activeTracerContext, cloudserverApiSpan => {
+        activeSpan.addEvent('Request validated, routing request using routerWebsite() in arsenal');
+        cloudserverApiSpan.setAttributes({
+            'code.lineno': 8,
+            'code.filename': 'lib/s3routes/routes/routeWebsite.ts',
+            'code.function': 'routerWebsite()',
+        });
+        activeSpan.addEvent('Detecting which API to route to using arsenal routerWebsite()');
    const { bucketName, query } = request as any
    log.debug('routing request', { method: 'routerWebsite' });
    // website endpoint only supports GET and HEAD and must have a bucket
    // http://docs.aws.amazon.com/AmazonS3/latest/dev/WebsiteEndpoints.html
    if ((request.method !== 'GET' && request.method !== 'HEAD')
        || !bucketName) {
+        activeSpan.recordException(errors.MethodNotAllowed);
+        cloudserverApiSpan.end();
        return routesUtils.errorHtmlResponse(errors.MethodNotAllowed,
            false, bucketName, response, null, log);
    }
    if (request.method === 'GET') {
+        activeSpan.updateName('S3 API request');
+        activeSpan.addEvent('Detected GetWebsite API request');
+        activeSpan.setAttribute('rpc.method', 'GetWebsite');
        return api.callApiMethod('websiteGet', request, response, log,
            (err, userErrorPageFailure, dataGetInfo, resMetaHeaders,
            redirectInfo, key) => {
+                cloudserverApiSpan.end();
+                activeSpan.addEvent('Located Data');
+                if (err?.code === 500) {
+                    activeSpan.recordException(err);
+                }
                routesUtils.statsReport500(err, statsClient);
                // request being redirected
                if (redirectInfo) {
                    if (err && redirectInfo.withError) {
+                        activeSpan.recordException(err);
                        return routesUtils.redirectRequestOnError(err,
-                            'GET', redirectInfo, dataGetInfo, dataRetrievalParams,
+                            'GET', redirectInfo, dataGetInfo, dataRetrievalFn,
                            response, resMetaHeaders, log);
                    }
+                    activeSpan.addEvent('Redirecting request');
                    // note that key might have been modified in websiteGet
                    // api to add index document
                    return routesUtils.redirectRequest(redirectInfo,
@@ -44,31 +71,48 @@ export default function routerWebsite(
                }
                // user has their own error page
                if (err && dataGetInfo) {
+                    activeSpan.recordException(err);
                    return routesUtils.streamUserErrorPage(err, dataGetInfo,
-                        dataRetrievalParams, response, resMetaHeaders, log);
+                        dataRetrievalFn, response, resMetaHeaders, log);
                }
                // send default error html response
                if (err) {
+                    activeSpan.recordException(err);
                    return routesUtils.errorHtmlResponse(err,
                        userErrorPageFailure, bucketName,
                        response, resMetaHeaders, log);
                }
                // no error, stream data
                return routesUtils.responseStreamData(null, query,
-                    resMetaHeaders, dataGetInfo, dataRetrievalParams, response,
+                    resMetaHeaders, dataGetInfo, dataRetrievalFn, response,
                    undefined, log);
+            }, {
+                cloudserverApiSpan,
+                activeSpan,
+                activeTracerContext,
+                tracer,
            });
    }
    if (request.method === 'HEAD') {
+        activeSpan.updateName('S3 API request');
+        activeSpan.addEvent('Detected HeadWebsite API request');
+        activeSpan.setAttribute('rpc.method', 'HeadWebsite');
        return api.callApiMethod('websiteHead', request, response, log,
            (err, resMetaHeaders, redirectInfo, key) => {
+                cloudserverApiSpan.end();
+                activeSpan.addEvent('HeadWebsite API operation complete');
+                if (err?.code === 500) {
+                    activeSpan.recordException(err);
+                }
                routesUtils.statsReport500(err, statsClient);
                if (redirectInfo) {
                    if (err && redirectInfo.withError) {
+                        activeSpan.recordException(err);
                        return routesUtils.redirectRequestOnError(err,
-                            'HEAD', redirectInfo, null, dataRetrievalParams,
+                            'HEAD', redirectInfo, null, dataRetrievalFn,
                            response, resMetaHeaders, log);
                    }
+                    activeSpan.addEvent('Redirecting request');
                    return routesUtils.redirectRequest(redirectInfo,
                        // TODO ARSN-217 encrypted does not exists in request.connection
                        // @ts-ignore
@@ -77,12 +121,21 @@ export default function routerWebsite(
                }
                // could redirect on err so check for redirectInfo first
                if (err) {
+                    activeSpan.recordException(err);
                    return routesUtils.errorHeaderResponse(err, response,
                        resMetaHeaders, log);
                }
+                activeSpan.addEvent('Finalizing Response with Content Headers and sending response to client');
                return routesUtils.responseContentHeaders(err, {}, resMetaHeaders,
                    response, log);
+            }, {
+                cloudserverApiSpan,
+                activeSpan,
+                activeTracerContext,
+                tracer,
            });
    }
    return undefined;
+    });
}

View File

@@ -1,6 +1,7 @@
import * as url from 'url';
import * as http from 'http';
import { eachSeries } from 'async';
+const opentelemetry = require('@opentelemetry/api');
import { RequestLogger } from 'werelogs';
@@ -9,8 +10,6 @@ import errors, { ArsenalError } from '../errors';
import * as constants from '../constants';
import DataWrapper from '../storage/data/DataWrapper';
import StatsClient from '../metrics/StatsClient';
-import { objectKeyByteLimit } from '../constants';
-const jsutil = require('../jsutil');

export type CallApiMethod = (
    methodName: string,
@@ -18,6 +17,10 @@ export type CallApiMethod = (
    response: http.ServerResponse,
    log: RequestLogger,
    callback: (err: ArsenalError | null, ...data: any[]) => void,
+    cloudserverApiSpan?: any,
+    activeSpan?: any,
+    activeTracerContext?: any,
+    tracer?: any,
) => void;
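The four trailing parameters widen the CallApiMethod contract, but the routes in this changeset actually pass the tracing state as one object in the first optional slot. An implementation would therefore read it roughly like this; a sketch under that assumption, not Cloudserver's actual code:

    const callApiMethod: CallApiMethod = (methodName, request, response,
        log, callback, oTelParams?: any) => {
        // The routes pass { cloudserverApiSpan, activeSpan,
        // activeTracerContext, tracer } as a single bundle here.
        const { activeSpan } = oTelParams || {};
        activeSpan?.addEvent(`dispatching ${methodName}`);
        // ...dispatch to the API implementation, then invoke callback...
        callback(null);
    };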
/**
@@ -149,15 +152,6 @@ const XMLResponseBackend = {
            '<Error>',
            `<Code>${errCode.message}</Code>`,
            `<Message>${errCode.description}</Message>`,
-        );
-        const invalidArguments = errCode.metadata.get('invalidArguments') || [];
-        invalidArguments.forEach((invalidArgument, index) => {
-            const counter = index + 1;
-            const { ArgumentName, ArgumentValue } = invalidArgument as any;
-            xml.push(`<ArgumentName${counter}>${ArgumentName}</ArgumentName${counter}>`);
-            xml.push(`<ArgumentValue${counter}>${ArgumentValue}</ArgumentValue${counter}>`);
-        });
-        xml.push(
            '<Resource></Resource>',
            `<RequestId>${log.getSerializedUids()}</RequestId>`,
            '</Error>',
@@ -227,18 +221,9 @@ const JSONResponseBackend = {
            "requestId": "4442587FB7D0A2F9"
        }
        */
-        const invalidArguments = errCode.metadata.get('invalidArguments') || [];
-        const invalids = invalidArguments.reduce((acc, invalidArgument, index) => {
-            const counter = index + 1;
-            const { ArgumentName, ArgumentValue } = invalidArgument as any;
-            const name = `ArgumentName${counter}`;
-            const value = `ArgumentValue${counter}`;
-            return { ...acc, [name]: ArgumentName, [value]: ArgumentValue };
-        }, {});
        const data = JSON.stringify({
            code: errCode.message,
            message: errCode.description,
-            ...invalids,
            resource: null,
            requestId: log.getSerializedUids(),
        });
@@ -370,6 +355,10 @@ function retrieveData(
    retrieveDataParams: any,
    response: http.ServerResponse,
    log: RequestLogger,
+    sproxydSpan?: any,
+    activeSpan?: any,
+    activeTracerContext?: any,
+    tracer?: any,
) {
    if (locations.length === 0) {
        return response.end();
@@ -385,18 +374,14 @@ function retrieveData(
        response.destroy();
        responseDestroyed = true;
    };
-    const _destroyReadable = (readable: http.IncomingMessage | null) => {
-        // s3-data sends Readable stream only which does not implement destroy
-        if (readable && readable.destroy) {
-            readable.destroy();
-        }
-    };
    // the S3-client might close the connection while we are processing it
    response.once('close', () => {
+        activeSpan.addEvent('response closed by client request');
+        sproxydSpan?.end();
        responseDestroyed = true;
-        _destroyReadable(currentStream);
+        if (currentStream) {
+            currentStream.destroy();
+        }
    });

    const {
@@ -410,10 +395,15 @@ function retrieveData(
    } = retrieveDataParams;
    const data = new DataWrapper(
        client, implName, config, kms, metadata, locStorageCheckFn, vault);
+    return tracer.startActiveSpan('Streaming Data Using Sproxyd', dataSpan => {
+        dataSpan.setAttributes({
+            'code.function': 'Arsenal:: retrieveData()',
+            'code.filepath': 'lib/s3routes/routesUtils.js',
+            'code.lineno': 349,
+        });
    return eachSeries(locations,
        (current, next) => data.get(current, response, log,
            (err: any, readable: http.IncomingMessage) => {
-                const cbOnce = jsutil.once(next);
                // NB: readable is of IncomingMessage type
                if (err) {
                    log.error('failed to get object', {
@@ -421,7 +411,7 @@ function retrieveData(
                        method: 'retrieveData',
                    });
                    _destroyResponse();
-                    return cbOnce(err);
+                    return next(err);
                }
                // response.isclosed is set by the S3 server. Might happen if
                // the S3-client closes the connection before the first request
@@ -430,24 +420,30 @@ function retrieveData(
                if (responseDestroyed || response.isclosed) {
                    log.debug(
                        'response destroyed before readable could stream');
-                    _destroyReadable(readable);
+                    readable.destroy();
                    const responseErr = new Error();
                    // @ts-ignore
                    responseErr.code = 'ResponseError';
                    responseErr.message = 'response closed by client request before all data sent';
-                    return cbOnce(responseErr);
+                    return next(responseErr);
                }
                // readable stream successfully consumed
                readable.on('end', () => {
+                    sproxydSpan?.addEvent('Readable stream successfully consumed');
+                    dataSpan.end();
+                    sproxydSpan?.end();
                    currentStream = null;
                    log.debug('readable stream end reached');
-                    return cbOnce();
+                    return next();
                });
                // errors on server side with readable stream
                readable.on('error', err => {
+                    activeSpan.recordException(err);
+                    dataSpan.end();
+                    sproxydSpan?.end();
                    log.error('error piping data from source');
                    _destroyResponse();
-                    return cbOnce(err);
+                    return next(err);
                });
                currentStream = readable;
                return readable.pipe(response, { end: false });
@@ -463,6 +459,7 @@ function retrieveData(
            response.end();
        },
    );
+    });
}
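Both the readable stream's 'end' and 'error' handlers end `dataSpan` and `sproxydSpan`, and the response 'close' handler can end `sproxydSpan` again. OpenTelemetry JS treats a second `end()` as a no-op that only logs a diagnostic warning, but if that noise matters, a small guard would make the intent explicit. `endOnce` is a hypothetical helper, not part of this changeset:

    // Hypothetical guard: end a span at most once across multiple
    // stream/response event handlers.
    function endOnce(span?: { end: () => void }) {
        let ended = false;
        return () => {
            if (span && !ended) {
                ended = true;
                span.end();
            }
        };
    }
    // Usage sketch: const endSproxydSpan = endOnce(sproxydSpan);
    // then call endSproxydSpan() from the 'end', 'error' and 'close' handlers.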
function _responseBody(
@@ -633,6 +630,20 @@ export function responseStreamData(
    range: [number, number] | undefined,
    log: RequestLogger,
) {
+    const {
+        oTel: {
+            tracer,
+            activeSpan,
+            activeTracerContext,
+        },
+    } = retrieveDataParams;
+    activeSpan.addEvent('Request processed, getting Data from sproxyd');
+    return tracer.startActiveSpan('Getting Object Data from RING', undefined, activeTracerContext, sproxydSpan => {
+        sproxydSpan.setAttributes({
+            'code.function': 'Arsenal:: responseStreamData()',
+            'code.filepath': 'lib/s3routes/routesUtils.js',
+            'code.lineno': 609,
+        });
    if (errCode && !response.headersSent) {
        return XMLResponseBackend.errorResponse(errCode, response, log,
            resHeaders);
@@ -667,12 +678,14 @@ export function responseStreamData(
    }
    response.on('finish', () => {
        // TODO ARSN-216 Fix logger
+        activeSpan.addEvent('Data retrieved from sproxyd and sending response to client');
        // @ts-expect-error
        log.end().info('responded with streamed content', {
            httpCode: response.statusCode,
        });
    });
-    return retrieveData(dataLocations, retrieveDataParams, response, log);
+    return retrieveData(dataLocations, retrieveDataParams, response, log, sproxydSpan, activeSpan, activeTracerContext, tracer);
+    });
}
/**
@@ -693,18 +706,26 @@ export function streamUserErrorPage(
    corsHeaders: { [key: string]: string },
    log: RequestLogger,
) {
+    const {
+        oTel: {
+            tracer,
+            activeSpan,
+            activeTracerContext,
+        },
+    } = retrieveDataParams;
    setCommonResponseHeaders(corsHeaders, response, log);
    response.setHeader('x-amz-error-code', err.message);
    response.setHeader('x-amz-error-message', err.description);
    response.writeHead(err.code, { 'Content-type': 'text/html' });
    response.on('finish', () => {
        // TODO ARSN-216 Fix logger
+        activeSpan.recordException(err);
        // @ts-expect-error
        log.end().info('responded with streamed content', {
            httpCode: response.statusCode,
        });
    });
-    return retrieveData(dataLocations, retrieveDataParams, response, log);
+    return retrieveData(dataLocations, retrieveDataParams, response, log, undefined, activeSpan, activeTracerContext, tracer);
}
/**
@@ -1155,9 +1176,6 @@ export function isValidObjectKey(objectKey: string, prefixBlacklist: string[]) {
    if (invalidPrefix) {
        return { isValid: false, invalidPrefix };
    }
-    if (Buffer.byteLength(objectKey, 'utf8') > objectKeyByteLimit) {
-        return { isValid: false };
-    }
    return { isValid: true };
}
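The removed check capped keys at `objectKeyByteLimit` bytes, a constant from ../constants; what matters here is byte length rather than character count, since multi-byte UTF-8 keys can exceed a byte limit with far fewer characters. A standalone equivalent of the dropped guard, with the limit assumed to be S3's documented 1024-byte maximum rather than read from the constants module:

    // Equivalent of the removed validation; the limit value is an assumption.
    const OBJECT_KEY_BYTE_LIMIT = 1024;
    function exceedsKeyByteLimit(objectKey: string): boolean {
        // 'é' is one character but two bytes, hence Buffer.byteLength.
        return Buffer.byteLength(objectKey, 'utf8') > OBJECT_KEY_BYTE_LIMIT;
    }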

View File

@@ -989,14 +989,13 @@ class DataWrapper {
        return this.client.delete(objectGetInfo, log.getSerializedUids(),
            err => {
                if (err) {
-                    // TODO: sproxydclient and hdclient does not return standard Arsenal error yet.
-                    if (err.code === 404) {
+                    if (err.is.ObjNotFound) {
                        log.info('no such key in datastore', {
                            objectGetInfo,
                            implName: this.implName,
                            moreRetries: 'no',
                        });
-                        return cb(errors.ObjNotFound);
+                        return cb(err);
                    }
                    log.error('delete error from datastore', {
                        error: err,

View File

@@ -1,10 +1,11 @@
-const { http, https } = require('httpagent');
+const https = require('https');
+const http = require('http');
const url = require('url');
const AWS = require('aws-sdk');
+const Sproxy = require('sproxydclient');
+const Hyperdrive = require('hdclient');
const HttpsProxyAgent = require('https-proxy-agent');
-require("aws-sdk/lib/maintenance_mode_message").suppress = true;

const constants = require('../../constants');
const DataFileBackend = require('./file/DataFileInterface');
const inMemory = require('./in_memory/datastore').backend;
@@ -25,13 +26,8 @@ function parseLC(config, vault) {
        if (locationObj.type === 'file') {
            clients[location] = new DataFileBackend(config);
        }
-        if (locationObj.type === 'vitastor') {
-            const VitastorBackend = require('./vitastor/VitastorBackend');
-            clients[location] = new VitastorBackend(location, locationObj.details);
-        }
        if (locationObj.type === 'scality') {
            if (locationObj.details.connector.sproxyd) {
-                const Sproxy = require('sproxydclient');
                clients[location] = new Sproxy({
                    bootstrap: locationObj.details.connector
                        .sproxyd.bootstrap,
@@ -46,7 +42,6 @@ function parseLC(config, vault) {
                });
                clients[location].clientType = 'scality';
            } else if (locationObj.details.connector.hdclient) {
-                const Hyperdrive = require('hdclient');
                clients[location] = new Hyperdrive.hdcontroller.HDProxydClient(
                    locationObj.details.connector.hdclient);
                clients[location].clientType = 'scality';
@@ -82,8 +77,8 @@ function parseLC(config, vault) {
                connectionAgent = new HttpsProxyAgent(options);
            } else {
                connectionAgent = sslEnabled ?
-                    new https.Agent(httpAgentConfig, { maxSockets: false }) :
-                    new http.Agent(httpAgentConfig, { maxSockets: false });
+                    new https.Agent(httpAgentConfig) :
+                    new http.Agent(httpAgentConfig);
            }
            const httpOptions = { agent: connectionAgent, timeout: 0 };
            const s3Params = {

View File

@@ -5,7 +5,6 @@ const { parseTagFromQuery } = require('../../s3middleware/tagging');
const { externalBackendHealthCheckInterval } = require('../../constants');
const DataFileBackend = require('./file/DataFileInterface');
const { createLogger, checkExternalBackend } = require('./external/utils');
-const jsutil = require('../../jsutil');

class MultipleBackendGateway {
    constructor(clients, metadata, locStorageCheckFn) {
@@ -200,12 +199,11 @@ class MultipleBackendGateway {
    uploadPart(request, streamingV4Params, stream, size, location, key,
        uploadId, partNumber, bucketName, log, cb) {
        const client = this.clients[location];
-        const cbOnce = jsutil.once(cb);

        if (client.uploadPart) {
            return this.locStorageCheckFn(location, size, log, err => {
                if (err) {
-                    return cbOnce(err);
+                    return cb(err);
                }
                return client.uploadPart(request, streamingV4Params, stream,
                    size, key, uploadId, partNumber, bucketName, log,
@@ -219,14 +217,14 @@ class MultipleBackendGateway {
                            'metric following object PUT failure',
                            { error: error.message });
                    }
-                            return cbOnce(err);
+                            return cb(err);
                        });
                    }
-                    return cbOnce(null, partInfo);
+                    return cb(null, partInfo);
                });
            });
        }
-        return cbOnce();
+        return cb();
    }

    listParts(key, uploadId, location, bucketName, partNumberMarker, maxParts,

View File

@@ -8,7 +8,6 @@ const getMetaHeaders =
const { prepareStream } = require('../../../s3middleware/prepareStream');
const { createLogger, logHelper, removeQuotes, trimXMetaPrefix } =
    require('./utils');
-const jsutil = require('../../../jsutil');

const missingVerIdInternalError = errors.InternalError.customizeDescription(
    'Invalid state. Please ensure versioning is enabled ' +
@@ -318,11 +317,9 @@ class AwsClient {
    uploadPart(request, streamingV4Params, stream, size, key, uploadId,
        partNumber, bucketName, log, callback) {
        let hashedStream = stream;
-        const cbOnce = jsutil.once(callback);

        if (request) {
            const partStream = prepareStream(request, streamingV4Params,
-                this._vault, log, cbOnce);
+                this._vault, log, callback);
            hashedStream = new MD5Sum();
            partStream.pipe(hashedStream);
        }
@@ -336,7 +333,7 @@ class AwsClient {
            if (err) {
                logHelper(log, 'error', 'err from data backend ' +
                    'on uploadPart', err, this._dataStoreName, this.clientType);
-                return cbOnce(errors.ServiceUnavailable
+                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
                        `${this.type}: ${err.message}`),
                );
@@ -350,7 +347,7 @@ class AwsClient {
                dataStoreName: this._dataStoreName,
                dataStoreETag: noQuotesETag,
            };
-            return cbOnce(null, dataRetrievalInfo);
+            return callback(null, dataRetrievalInfo);
        });
    }
View File

@ -1,5 +1,6 @@
const { BlobServiceClient, StorageSharedKeyCredential, AnonymousCredential } = require('@azure/storage-blob'); const url = require('url');
const { ClientSecretCredential } = require('@azure/identity');
const azure = require('azure-storage');
const errors = require('../../../errors').default; const errors = require('../../../errors').default;
const azureMpuUtils = require('../../../s3middleware/azureHelpers/mpuUtils'); const azureMpuUtils = require('../../../s3middleware/azureHelpers/mpuUtils');
const { validateAndFilterMpuParts } = const { validateAndFilterMpuParts } =
@ -7,103 +8,55 @@ const { validateAndFilterMpuParts } =
const { createLogger, logHelper, translateAzureMetaHeaders } = const { createLogger, logHelper, translateAzureMetaHeaders } =
require('./utils'); require('./utils');
const objectUtils = require('../../../s3middleware/objectUtils');
const constants = require('../../../constants'); const constants = require('../../../constants');
const packageVersion = require('../../../../package.json').version; const packageVersion = require('../../../../package.json').version;
class AzureClient { azure.Constants.USER_AGENT_PRODUCT_NAME = constants.productName;
static addQueryParams(endpoint, token) { azure.Constants.USER_AGENT_PRODUCT_VERSION = packageVersion;
const url = new URL(endpoint);
const query = token.startsWith('?') ? token.slice(1) : token;
if (!url.search) {
url.search = `?${query}`;
} else if (url.search === '?') {
url.search += query;
} else {
url.search += `&${query}`;
}
return url.toString();
}
class AzureClient {
constructor(config) { constructor(config) {
this._azureStorageEndpoint = config.azureStorageEndpoint; this._azureStorageEndpoint = config.azureStorageEndpoint;
this._azureStorageCredentials = config.azureStorageCredentials; this._azureStorageCredentials = config.azureStorageCredentials;
this._azureContainerName = config.azureContainerName; this._azureContainerName = config.azureContainerName;
const cred = (credentialsConfig => { this._client = azure.createBlobService(
switch (credentialsConfig.authMethod) { this._azureStorageCredentials.storageAccountName,
case 'client-secret': this._azureStorageCredentials.storageAccessKey,
return new ClientSecretCredential( this._azureStorageEndpoint);
credentialsConfig.tenantId, this._client.enableGlobalHttpAgent = true;
credentialsConfig.clientId,
credentialsConfig.clientKey,
);
case 'shared-access-signature':
this._azureStorageEndpoint = AzureClient.addQueryParams(
this._azureStorageEndpoint, credentialsConfig.sasToken);
return new AnonymousCredential();
case 'shared-key':
default:
return new StorageSharedKeyCredential(
credentialsConfig.storageAccountName,
credentialsConfig.storageAccessKey,
);
}
})(this._azureStorageCredentials);
const proxyOptions = (() => {
if (!config.proxy || !config.proxy.url) {
return undefined;
}
// NOTE: config.proxy.certs is not supported
const parsedUrl = new URL(config.proxy.url);
return {
host: parsedUrl.host,
port: parsedUrl.port || 80,
username: parsedUrl.username || undefined,
password: parsedUrl.password || undefined,
};
})();
this._client = new BlobServiceClient(this._azureStorageEndpoint, cred, {
keepAliveOptions: {
enable: false, // Enable use of global HTTP agent
},
proxyOptions,
userAgentOptions: {
userAgentPrefix: `${constants.productName}/${packageVersion} `,
},
}).getContainerClient(this._azureContainerName);
this._dataStoreName = config.dataStoreName; this._dataStoreName = config.dataStoreName;
this._bucketMatch = config.bucketMatch; this._bucketMatch = config.bucketMatch;
if (config.proxy && config.proxy.url) {
const parsedUrl = url.parse(config.proxy.url);
if (!parsedUrl.port) {
parsedUrl.port = 80;
}
const proxyParams = parsedUrl;
if (config.proxy.certs) {
Object.assign(proxyParams, config.proxy.certs);
}
this._client.setProxy(proxyParams);
}
} }
/** _errorWrapper(s3Method, azureMethod, args, log, cb) {
* Run azure method call.
* @param {string} [s3Method] S3 method name
* @param {string} [azureMethod] Azure method name
* @param {ErrorWrapper~Command} [command] Actual command to run
* @param {RequestLogger} [log] Logger
* @param {ErrorWrapper~Cb} [cb] The final callback
* @returns {void}
*
* @callback ErrorWrapper~Command
* @param {azure.ContainerClient} [client] Azure client to use
* @returns {Promise<any>}
*
* @callback ErrorWrapper~Cb
* @param {azure.ArsenalError} [arsenalErr] Error returned by the command
* @param {any} [result] Result of Azure SDK command
* @returns {void}
*/
_errorWrapper(s3Method, azureMethod, command, log, cb) {
if (log) { if (log) {
log.info(`calling azure ${azureMethod} in ${s3Method}`); log.info(`calling azure ${azureMethod}`);
}
try {
this._client[azureMethod].apply(this._client, args);
} catch (err) {
const error = errors.ServiceUnavailable;
if (log) {
log.error('error thrown by Azure Storage Client Library',
{ error: err.message, stack: err.stack, s3Method,
azureMethod, dataStoreName: this._dataStoreName });
}
cb(error.customizeDescription('Error from Azure ' +
`method: ${azureMethod} on ${s3Method} S3 call: ` +
`${err.message}`));
} }
command(this._client).then(
result => cb(null, result),
cb,
);
} }
_createAzureKey(requestBucketName, requestObjectKey, _createAzureKey(requestBucketName, requestObjectKey,
@ -166,32 +119,6 @@ class AzureClient {
}; };
} }
/**
* Build Azure HTTP headers for content settings
* @param {object} [properties] The blob properties to set.
* @param {string} [properties.contentType] The MIME content type of the blob.
* The default type is application/octet-stream.
* @param {string} [properties.contentEncoding] The content encodings that have been applied
* to the blob.
* @param {string} [properties.contentLanguage] The natural languages used by this resource.
* @param {string} [properties.cacheControl] The blob's cache control.
* @param {string} [properties.contentDisposition] The blob's content disposition.
* @param {string} [properties.contentMD5] The blob's MD5 hash.
* @returns {BlobHTTPHeaders} The headers
*/
_getAzureContentSettingsHeaders(properties) {
return {
blobContentMD5: properties.contentMD5
? objectUtils.getMD5Buffer(properties.contentMD5)
: undefined,
blobContentType: properties.contentType || undefined,
blobCacheControl: properties.cacheControl || undefined,
blobContentDisposition: properties.contentDisposition || undefined,
blobContentEncoding: properties.contentEncoding || undefined,
blobContentLanguage: properties.blobContentLanguage || undefined,
};
}
put(stream, size, keyContext, reqUids, callback, skey, metadata) { put(stream, size, keyContext, reqUids, callback, skey, metadata) {
const log = createLogger(reqUids); const log = createLogger(reqUids);
// before blob is put, make sure there is no ongoing MPU with same key // before blob is put, make sure there is no ongoing MPU with same key
@ -207,59 +134,50 @@ class AzureClient {
const options = { const options = {
metadata: translateAzureMetaHeaders(keyContext.metaHeaders, metadata: translateAzureMetaHeaders(keyContext.metaHeaders,
keyContext.tagging), keyContext.tagging),
blobHTTPHeaders: this._getAzureContentSettingsHeaders( contentSettings: {
keyContext || {}), contentType: keyContext.contentType || undefined,
cacheControl: keyContext.cacheControl || undefined,
contentDisposition: keyContext.contentDisposition ||
undefined,
contentEncoding: keyContext.contentEncoding || undefined,
},
}; };
if (size === 0) { if (size === 0) {
return this._errorWrapper('put', 'uploadData', async client => { return this._errorWrapper('put', 'createBlockBlobFromText',
try { [this._azureContainerName, azureKey, '', options,
await client.getBlockBlobClient(azureKey).upload('', 0, options); err => {
return azureKey; if (err) {
} catch (err) { logHelper(log, 'error', 'err from Azure PUT data ' +
logHelper(log, 'error', 'err from Azure PUT data backend', 'backend', err, this._dataStoreName);
err, this._dataStoreName); return callback(errors.ServiceUnavailable
throw errors.ServiceUnavailable.customizeDescription( .customizeDescription('Error returned from ' +
`Error returned from Azure: ${err.message}`); `Azure: ${err.message}`));
} }
}, log, callback); return callback(null, azureKey);
}], log, callback);
} }
return this._errorWrapper('put', 'createBlockBlobFromStream', async client => { return this._errorWrapper('put', 'createBlockBlobFromStream',
try { [this._azureContainerName, azureKey, stream, size, options,
await client.getBlockBlobClient(azureKey).upload(() => stream, size, options); err => {
return azureKey; if (err) {
} catch (err) { logHelper(log, 'error', 'err from Azure PUT data ' +
logHelper(log, 'error', 'err from Azure PUT data backend', 'backend', err, this._dataStoreName);
err, this._dataStoreName); return callback(errors.ServiceUnavailable
throw errors.ServiceUnavailable.customizeDescription( .customizeDescription('Error returned from ' +
`Error returned from Azure: ${err.message}`); `Azure: ${err.message}`));
} }
}, log, callback); return callback(null, azureKey);
}], log, callback);
}); });
} }
/**
* Build BlobRequestConditions from azureStreamingOptions
* @param {object} [objectGetInfoOptions] Azure streaming options
* @param {object} [objectGetInfoOptions.accessConditions] Access conditions
* @param {Date} [objectGetInfoOptions.accessConditions.DateUnModifiedSince] Filter objects not
* modified since that date.
* @returns {BlobRequestConditions} Request conditions
*/
_getAzureConditions(objectGetInfoOptions) {
const accessConditions = objectGetInfoOptions.accessConditions || {};
return {
ifUnmodifiedSince: accessConditions.DateUnModifiedSince || undefined,
};
}
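// Illustrative mapping (not from the original file): an S3-style
// `{ accessConditions: { DateUnModifiedSince: someDate } }` becomes the SDK's
// `{ ifUnmodifiedSince: someDate }`, and an absent condition maps to
// `{ ifUnmodifiedSince: undefined }`.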
    head(objectGetInfo, reqUids, callback) {
        const log = createLogger(reqUids);
-       const { key } = objectGetInfo;
-       return this._errorWrapper('head', 'getBlobProperties', async client => {
-           try {
-               const data = await client.getBlockBlobClient(key).getProperties();
-               return data;
-           } catch (err) {
+       const { key, azureStreamingOptions } = objectGetInfo;
+       return this._errorWrapper('head', 'getBlobProperties',
+           [this._azureContainerName, key, azureStreamingOptions,
+           (err, data) => {
+               if (err) {
                    let logLevel;
                    let retError;
                    if (err.code === 'NotFound') {
@@ -267,46 +185,42 @@ class AzureClient {
                        retError = errors.LocationNotFound;
                    } else {
                        logLevel = 'error';
-                       retError = errors.ServiceUnavailable.customizeDescription(
-                           `Error returned from Azure: ${err.message}`);
+                       retError = errors.ServiceUnavailable
+                           .customizeDescription(
                               `Error returned from Azure: ${err.message}`);
                    }
                    logHelper(log, logLevel, 'err from Azure HEAD data backend',
                        err, this._dataStoreName);
-                   throw retError;
+                   return callback(retError);
                }
-           }
-       }, log, callback);
+               return callback(null, data);
+           }], log, callback);
    }
    get(objectGetInfo, range, reqUids, callback) {
        const log = createLogger(reqUids);
-       // for backwards compatibility
        const { key, response, azureStreamingOptions } = objectGetInfo;
-       let rangeStart = 0;
-       let rangeEnd = undefined;
+       let streamingOptions;
        if (azureStreamingOptions) {
            // option coming from api.get()
-           rangeStart = (typeof azureStreamingOptions.rangeStart === 'string')
-               ? parseInt(azureStreamingOptions.rangeStart, 10)
-               : azureStreamingOptions.rangeStart;
-           rangeEnd = (typeof azureStreamingOptions.rangeEnd === 'string')
-               ? parseInt(azureStreamingOptions.rangeEnd, 10)
-               : azureStreamingOptions.rangeEnd;
+           streamingOptions = azureStreamingOptions;
        } else if (range) {
            // option coming from multipleBackend.upload()
-           rangeStart = (typeof range[0] === 'number') ? range[0] : 0;
-           rangeEnd = range[1] || undefined;
+           const rangeStart = (typeof range[0] === 'number') ? range[0].toString() : undefined;
+           const rangeEnd = range[1] ? range[1].toString() : undefined;
+           streamingOptions = { rangeStart, rangeEnd };
        }
-       this._errorWrapper('get', 'getBlobToStream', async client => {
-           try {
-               const rsp = await client.getBlockBlobClient(key)
-                   .download(rangeStart, rangeEnd - rangeStart + 1 || undefined);
-               rsp.readableStreamBody.pipe(response);
-               return response;
-           } catch (err) {
-               logHelper(log, 'error', 'err from Azure GET data backend',
-                   err, this._dataStoreName);
-               throw errors.ServiceUnavailable;
-           }
-       }, log, callback);
+       this._errorWrapper('get', 'getBlobToStream',
+           [this._azureContainerName, key, response, streamingOptions,
+           err => {
+               if (err) {
+                   logHelper(log, 'error', 'err from Azure GET data backend',
+                       err, this._dataStoreName);
+                   return callback(errors.ServiceUnavailable);
+               }
+               return callback(null, response);
+           }], log, callback);
    }
    delete(objectGetInfo, reqUids, callback) {
@@ -316,46 +230,44 @@ class AzureClient {
            objectGetInfo.key;
        let options;
        if (typeof objectGetInfo === 'object') {
-           options = {
-               conditions: this._getAzureConditions(objectGetInfo.options || {}),
-           };
+           options = objectGetInfo.options;
        }
-       return this._errorWrapper('delete', 'deleteBlobIfExists', async client => {
-           try {
-               await client.getBlockBlobClient(key).deleteIfExists(options);
-           } catch (err) {
-               if (err.statusCode === 412) {
-                   throw errors.PreconditionFailed;
-               }
-               const log = createLogger(reqUids);
-               logHelper(log, 'error', 'error deleting object from Azure datastore',
-                   err, this._dataStoreName);
-               throw errors.ServiceUnavailable.customizeDescription(
-                   `Error returned from Azure: ${err.message}`);
-           }
-       }, log, callback);
+       return this._errorWrapper('delete', 'deleteBlobIfExists',
+           [this._azureContainerName, key, options,
+           err => {
+               if (err && err.statusCode === 412) {
+                   return callback(errors.PreconditionFailed);
+               }
+               if (err) {
+                   const log = createLogger(reqUids);
+                   logHelper(log, 'error', 'error deleting object from ' +
+                       'Azure datastore', err, this._dataStoreName);
+                   return callback(errors.ServiceUnavailable
+                       .customizeDescription('Error returned from ' +
+                       `Azure: ${err.message}`));
+               }
+               return callback();
+           }], log, callback);
    }
    healthcheck(location, callback, flightCheckOnStartUp) {
        const azureResp = {};
-       this._errorWrapper('healthcheck', 'checkAzureHealth', async client => {
-           try {
-               if (flightCheckOnStartUp) {
-                   await client.createIfNotExists();
-               } else {
-                   await client.exists();
-               }
-               azureResp[location] = {
-                   message: 'Congrats! You can access the Azure storage account',
-               };
-           } catch (err) {
-               azureResp[location] = {
-                   error: err.message,
-                   external: true,
-               };
-           }
-           return azureResp;
-       }, null, callback);
+       const healthCheckAction = flightCheckOnStartUp ?
+           'createContainerIfNotExists' : 'doesContainerExist';
+       this._errorWrapper('checkAzureHealth', healthCheckAction,
+           [this._azureContainerName, err => {
+               /* eslint-disable no-param-reassign */
+               if (err) {
+                   azureResp[location] = { error: err.message,
+                       external: true };
+                   return callback(null, azureResp);
+               }
+               azureResp[location] = {
+                   message:
+                       'Congrats! You can access the Azure storage account',
+               };
+               return callback(null, azureResp);
+           }], null, callback);
    }
    uploadPart(request, streamingV4Params, partStream, size, key, uploadId,
@@ -409,7 +321,9 @@ class AzureClient {
    completeMPU(jsonList, mdInfo, key, uploadId, bucket, metaHeaders,
        contentSettings, tagging, log, callback) {
        const azureKey = this._createAzureKey(bucket, key, this._bucketMatch);
-       const commitList = jsonList.uncommittedBlocks || [];
+       const commitList = {
+           UncommittedBlocks: jsonList.uncommittedBlocks || [],
+       };
        let filteredPartsObj;
        if (!jsonList.uncommittedBlocks) {
            const { storedParts, mpuOverviewKey, splitter } = mdInfo;
@@ -422,56 +336,60 @@ class AzureClient {
                // part.locations is always array of 1, which contains data info
                const subPartIds =
                    azureMpuUtils.getSubPartIds(part.locations[0], uploadId);
-               commitList.push(...subPartIds);
+               commitList.UncommittedBlocks.push(...subPartIds);
            });
        }
        const options = {
-           blobHTTPHeaders: this._getAzureContentSettingsHeaders(contentSettings || {}),
+           contentSettings,
            metadata: translateAzureMetaHeaders(metaHeaders || {}, tagging),
        };
-       return this._errorWrapper('completeMPU', 'commitBlocks', async client => {
-           try {
-               await client.getBlockBlobClient(azureKey).commitBlockList(commitList, options);
-               return {
-                   key: azureKey,
-                   filteredPartsObj,
-               };
-           } catch (err) {
-               logHelper(log, 'error', 'err completing MPU on Azure datastore',
-                   err, this._dataStoreName);
-               throw errors.ServiceUnavailable.customizeDescription(
-                   `Error returned from Azure: ${err.message}`);
-           }
-       }, log, callback);
+       return this._errorWrapper('completeMPU', 'commitBlocks',
+           [this._azureContainerName, azureKey, commitList, options,
+           err => {
+               if (err) {
+                   logHelper(log, 'error', 'err completing MPU on Azure ' +
+                       'datastore', err, this._dataStoreName);
+                   return callback(errors.ServiceUnavailable
+                       .customizeDescription('Error returned from ' +
+                       `Azure: ${err.message}`));
+               }
+               const completeObjData = {
+                   key: azureKey,
+                   filteredPartsObj,
+               };
+               return callback(null, completeObjData);
+           }], log, callback);
    }
    objectPutTagging(key, bucket, objectMD, log, callback) {
        const azureKey = this._createAzureKey(bucket, key, this._bucketMatch);
        const azureMD = this._getMetaHeaders(objectMD);
        azureMD.tags = JSON.stringify(objectMD.tags);
-       this._errorWrapper('objectPutTagging', 'setBlobMetadata', async client => {
-           try {
-               await client.getBlockBlobClient(azureKey).setMetadata(azureMD);
-           } catch (err) {
-               logHelper(log, 'error', 'err putting object tags to Azure backend',
-                   err, this._dataStoreName);
-               throw errors.ServiceUnavailable;
-           }
-       }, log, callback);
+       this._errorWrapper('objectPutTagging', 'setBlobMetadata',
+           [this._azureContainerName, azureKey, azureMD,
+           err => {
+               if (err) {
+                   logHelper(log, 'error', 'err putting object tags to ' +
+                       'Azure backend', err, this._dataStoreName);
+                   return callback(errors.ServiceUnavailable);
+               }
+               return callback();
+           }], log, callback);
    }

    objectDeleteTagging(key, bucketName, objectMD, log, callback) {
        const azureKey = this._createAzureKey(bucketName, key, this._bucketMatch);
        const azureMD = this._getMetaHeaders(objectMD);
-       this._errorWrapper('objectDeleteTagging', 'setBlobMetadata', async client => {
-           try {
-               await client.getBlockBlobClient(azureKey).setMetadata(azureMD);
-           } catch (err) {
-               logHelper(log, 'error', 'err putting object tags to Azure backend',
-                   err, this._dataStoreName);
-               throw errors.ServiceUnavailable;
-           }
-       }, log, callback);
+       this._errorWrapper('objectDeleteTagging', 'setBlobMetadata',
+           [this._azureContainerName, azureKey, azureMD,
+           err => {
+               if (err) {
+                   logHelper(log, 'error', 'err putting object tags to ' +
+                       'Azure backend', err, this._dataStoreName);
+                   return callback(errors.ServiceUnavailable);
+               }
+               return callback();
+           }], log, callback);
    }
    copyObject(request, destLocationConstraintName, sourceKey,
@@ -488,50 +406,54 @@ class AzureClient {
        let options;
        if (storeMetadataParams.metaHeaders) {
-           options = {
-               metadata: translateAzureMetaHeaders(storeMetadataParams.metaHeaders),
-           };
+           options = { metadata:
+               translateAzureMetaHeaders(storeMetadataParams.metaHeaders) };
        }
-       // TODO: should we use syncCopyBlob() instead? or use poller.pollUntilDone() to wait until complete?
-       this._errorWrapper('copyObject', 'startCopyBlob', async client => {
-           let res;
-           try {
-               const poller = await client.getBlockBlobClient(destAzureKey).beginCopyFromURL(
-                   `${this._azureStorageEndpoint}${sourceContainerName}/${sourceKey}`,
-                   options,
-               );
-               res = poller.getOperationState().result;
-               if (res.copyProgress !== 'pending') {
-                   return destAzureKey;
-               }
-           } catch (err) {
-               if (err.code === 'CannotVerifyCopySource') { // TODO: may use a constant (or type) from the SDK?
-                   logHelper(log, 'error',
-                       `Unable to access ${sourceContainerName} Azure Container`,
-                       err, this._dataStoreName);
-                   throw errors.AccessDenied.customizeDescription(
-                       `Error: Unable to access ${sourceContainerName} Azure Container`);
-               }
-               logHelper(log, 'error', 'error from data backend on copyObject',
-                   err, this._dataStoreName);
-               throw errors.ServiceUnavailable.customizeDescription(
-                   `Error returned from AWS: ${err.message}`);
-           }
-           logHelper(log, 'error', 'Azure copy status is pending', {}, this._dataStoreName);
-           try {
-               await client.getBlockBlobClient(destAzureKey).abortCopyFromURL(res.copyId);
-           } catch (err) {
-               logHelper(log, 'error', 'error from data backend on abortCopyBlob',
-                   err, this._dataStoreName);
-               throw errors.ServiceUnavailable.customizeDescription(
-                   `Error returned from AWS on abortCopyBlob: ${err.message}`);
-           }
-           throw errors.InvalidObjectState.customizeDescription(
-               'Error: Azure copy status was pending. It has been aborted successfully');
-       }, log, callback);
+       this._errorWrapper('copyObject', 'startCopyBlob',
+           [`${this._azureStorageEndpoint}` +
+               `${sourceContainerName}/${sourceKey}`,
+           this._azureContainerName, destAzureKey, options,
+           (err, res) => {
+               if (err) {
+                   if (err.code === 'CannotVerifyCopySource') {
+                       logHelper(log, 'error', 'Unable to access ' +
+                           `${sourceContainerName} Azure Container`, err,
+                           this._dataStoreName);
+                       return callback(errors.AccessDenied
+                           .customizeDescription('Error: Unable to access ' +
+                           `${sourceContainerName} Azure Container`));
+                   }
+                   logHelper(log, 'error', 'error from data backend on ' +
+                       'copyObject', err, this._dataStoreName);
+                   return callback(errors.ServiceUnavailable
+                       .customizeDescription('Error returned from ' +
+                       `AWS: ${err.message}`));
+               }
+               if (res.copy.status === 'pending') {
+                   logHelper(log, 'error', 'Azure copy status is pending',
+                       err, this._dataStoreName);
+                   const copyId = res.copy.id;
+                   this._client.abortCopyBlob(this._azureContainerName,
+                       destAzureKey, copyId, err => {
+                           if (err) {
+                               logHelper(log, 'error', 'error from data backend ' +
+                                   'on abortCopyBlob', err, this._dataStoreName);
+                               return callback(errors.ServiceUnavailable
+                                   .customizeDescription('Error returned from ' +
+                                   `AWS on abortCopyBlob: ${err.message}`));
+                           }
+                           return callback(errors.InvalidObjectState
+                               .customizeDescription('Error: Azure copy status was ' +
+                               'pending. It has been aborted successfully'));
+                       });
+               }
+               return callback(null, destAzureKey);
+           }], log, callback);
    }
}

View File

@@ -1,696 +0,0 @@
// Zenko CloudServer Vitastor data storage backend adapter
// Copyright (c) Vitaliy Filippov, 2019+
// License: VNPL-1.1 (see README.md for details)
const stream = require('stream');
const vitastor = require('vitastor');
const VOLUME_MAGIC = 'VstS3Vol';
const OBJECT_MAGIC = 'VstS3Obj';
const FLAG_DELETED = 2n;
type Volume = {
id: number,
partial_sectors: {
[key: string]: {
buffer: Buffer,
refs: number,
},
},
header: {
location: string,
bucket: string,
max_size: number,
create_ts: number,
used_ts: number,
size: number,
objects: number,
removed_objects: number,
object_bytes: number,
removed_bytes: number,
},
};
type ObjectHeader = {
size: number,
key: string,
part_num?: number,
};
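// Illustrative on-disk layout, inferred from the read/write code below (not
// part of the original file):
//
//   sector 0:          VOLUME_MAGIC + JSON(Volume.header), padded to sector_size
//   sector 1 onwards:  appended objects, each laid out as
//                      <8B OBJECT_MAGIC> <8B flags> <8B JSON length> <JSON(ObjectHeader)> <data>
//
// The only flag used here is FLAG_DELETED, and Volume.header.size is the
// append position for the next object (rounded up to a sector boundary when
// pack_objects is disabled).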
class VitastorBackend
{
locationName: string;
config: {
pool_id: number,
metadata_image: string,
metadata_pool_id: number,
metadata_inode_num: number,
size_buckets: number[],
size_bucket_mul: number,
id_batch_size: number,
sector_size: number,
write_chunk_size: number,
read_chunk_size: number,
pack_objects: boolean,
// and also other parameters for vitastor itself
};
next_id: number;
alloc_id: number;
opened: boolean;
on_open: ((...args: any[]) => void)[] | null;
open_error: Error | null;
cli: any;
kv: any;
volumes: {
[bucket: string]: {
[max_size: string]: Volume,
},
};
volumes_by_id: {
[id: string]: Volume,
};
volume_delete_stats: {
[id: string]: {
count: number,
bytes: number,
},
};
constructor(locationName, config)
{
this.locationName = locationName;
this.config = config;
// validate config
this.config.pool_id = Number(this.config.pool_id) || 0;
if (!this.config.pool_id)
throw new Error('pool_id is required for Vitastor');
if (!this.config.metadata_image && (!this.config.metadata_pool_id || !this.config.metadata_inode_num))
throw new Error('metadata_image or metadata_inode is required for Vitastor');
if (!this.config.size_buckets || !this.config.size_buckets.length)
this.config.size_buckets = [ 32*1024, 128*1024, 512*1024, 2*1024*1024, 8*1024*1024 ];
this.config.size_bucket_mul = Number(this.config.size_bucket_mul) || 2;
this.config.id_batch_size = Number(this.config.id_batch_size) || 100;
this.config.sector_size = Number(this.config.sector_size) || 0;
if (this.config.sector_size < 4096)
this.config.sector_size = 4096;
this.config.write_chunk_size = Number(this.config.write_chunk_size) || 0;
if (this.config.write_chunk_size < this.config.sector_size)
this.config.write_chunk_size = 4*1024*1024; // 4 MB
this.config.read_chunk_size = Number(this.config.read_chunk_size) || 0;
if (this.config.read_chunk_size < this.config.sector_size)
this.config.read_chunk_size = 4*1024*1024; // 4 MB
this.config.pack_objects = !!this.config.pack_objects;
// state
this.next_id = 1;
this.alloc_id = 0;
this.opened = false;
this.on_open = null;
this.open_error = null;
this.cli = new vitastor.Client(config);
this.kv = new vitastor.KV(this.cli);
// we group objects into volumes by bucket and size
this.volumes = {};
this.volumes_by_id = {};
this.volume_delete_stats = {};
}
async _makeVolumeId()
{
if (this.next_id <= this.alloc_id)
{
return this.next_id++;
}
const id_key = 'id'+this.config.pool_id;
const [ err, prev ] = await new Promise<[ any, string ]>(ok => this.kv.get(id_key, (err, value) => ok([ err, value ])));
if (err && err != vitastor.ENOENT)
{
throw new Error(err);
}
const new_id = (parseInt(prev) || 0) + 1;
this.next_id = new_id;
this.alloc_id = this.next_id + this.config.id_batch_size - 1;
await new Promise((ok, no) => this.kv.set(id_key, this.alloc_id, err => (err ? no(new Error(err)) : ok(null)), cas_old => cas_old === prev));
return this.next_id;
}
async _getVolume(bucketName, size)
{
if (!this.opened)
{
if (this.on_open)
{
await new Promise(ok => this.on_open!.push(ok));
}
else
{
this.on_open = [];
if (this.config.metadata_image)
{
const img = new vitastor.Image(this.cli, this.config.metadata_image);
const info = await new Promise<{ pool_id: number, inode_num: number }>(ok => img.get_info(ok));
this.config.metadata_pool_id = info.pool_id;
this.config.metadata_inode_num = info.inode_num;
}
const kv_config = {};
for (const key in this.config)
{
if (key.substr(0, 3) === 'kv_')
kv_config[key] = this.config[key];
}
this.open_error = await new Promise(ok => this.kv.open(
this.config.metadata_pool_id, this.config.metadata_inode_num,
kv_config, err => ok(err ? new Error(err) : null)
));
this.opened = true;
this.on_open.map(cb => setImmediate(cb));
this.on_open = null;
}
}
if (this.open_error)
{
throw this.open_error;
}
let i;
for (i = 0; i < this.config.size_buckets.length && size >= this.config.size_buckets[i]; i++) {}
let s;
if (i < this.config.size_buckets.length)
s = this.config.size_buckets[i];
else if (this.config.size_bucket_mul > 1)
{
    // start from the largest fixed bucket and grow until the object fits
    s = this.config.size_buckets[this.config.size_buckets.length-1];
    while (size >= s)
        s = Math.floor(this.config.size_bucket_mul * s);
}
if (!this.volumes[bucketName])
{
this.volumes[bucketName] = {};
}
if (this.volumes[bucketName][s])
{
return this.volumes[bucketName][s];
}
const new_id = await this._makeVolumeId();
const new_vol = this.volumes[bucketName][s] = {
id: new_id,
// FIXME: partial_sectors should be written with CAS because otherwise we may lose quick deletes
partial_sectors: {},
header: {
location: this.locationName,
bucket: bucketName,
max_size: s,
create_ts: Date.now(),
used_ts: Date.now(),
size: this.config.sector_size, // initial position is right after header
objects: 0,
removed_objects: 0,
object_bytes: 0,
removed_bytes: 0,
},
};
this.volumes_by_id[new_id] = new_vol;
const header_text = JSON.stringify(this.volumes[bucketName][s].header);
const buf = Buffer.alloc(this.config.sector_size);
buf.write(VOLUME_MAGIC + header_text, 0);
await new Promise((ok, no) => this.cli.write(
this.config.pool_id, new_id, 0, buf, err => (err ? no(new Error(err)) : ok(null))
));
await new Promise((ok, no) => this.kv.set(
'vol_'+this.config.pool_id+'_'+new_id, header_text, err => (err ? no(new Error(err)) : ok(null)), cas_old => !cas_old
));
return new_vol;
}
toObjectGetInfo(objectKey, bucketName, storageLocation)
{
return null;
}
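// Align the start of a buffered write: if cur_pos is not sector-aligned (or
// the whole write stays inside a single sector), merge the pending chunks
// into a copy of the containing sector so the device write can begin on a
// sector boundary; with pack_objects, the partial sector is also kept in
// vol.partial_sectors and reference-counted through sector_refs.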
_bufferStart(vol, cur_pos, cur_size, cur_chunks, sector_refs)
{
if ((cur_pos % this.config.sector_size) ||
Math.floor((cur_pos + cur_size) / this.config.sector_size) == Math.floor(cur_pos / this.config.sector_size))
{
const sect_pos = Math.floor(cur_pos / this.config.sector_size) * this.config.sector_size;
const sect = vol.partial_sectors[sect_pos]
? vol.partial_sectors[sect_pos].buffer
: Buffer.alloc(this.config.sector_size);
if (this.config.pack_objects)
{
// Save only if <pack_objects>
if (!vol.partial_sectors[sect_pos])
vol.partial_sectors[sect_pos] = { buffer: sect, refs: 0 };
vol.partial_sectors[sect_pos].refs++;
sector_refs.push(sect_pos);
}
let off = cur_pos % this.config.sector_size;
let i = 0;
for (; i < cur_chunks.length; i++)
{
let copy_len = this.config.sector_size - off;
copy_len = copy_len > cur_chunks[i].length ? cur_chunks[i].length : copy_len;
cur_chunks[i].copy(sect, off, 0, copy_len);
off += copy_len;
if (copy_len < cur_chunks[i].length)
{
cur_chunks[i] = cur_chunks[i].slice(copy_len);
cur_size -= copy_len;
break;
}
else
cur_size -= cur_chunks[i].length;
}
cur_chunks.splice(0, i, sect);
cur_size += this.config.sector_size;
cur_pos = sect_pos;
}
return [ cur_pos, cur_size ];
}
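// Split the buffered chunks into a sector-aligned prefix that can be written
// out now and an unaligned tail: normally the tail is carried over into
// cur_chunks for the next write, but on the final flush (write_all) it is
// copied into a (possibly shared) last sector buffer and written out padded
// to a full sector.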
_bufferEnd(vol, cur_pos, cur_size, cur_chunks, sector_refs, write_all)
{
const write_pos = cur_pos;
const write_chunks = cur_chunks;
let write_size = cur_size;
cur_chunks = [];
cur_pos += cur_size;
cur_size = 0;
let remain = (cur_pos % this.config.sector_size);
if (remain > 0)
{
cur_pos -= remain;
let last_sect = null;
if (write_all)
{
last_sect = vol.partial_sectors[cur_pos]
? vol.partial_sectors[cur_pos].buffer
: Buffer.alloc(this.config.sector_size);
if (this.config.pack_objects)
{
// Save only if <pack_objects>
if (!vol.partial_sectors[cur_pos])
vol.partial_sectors[cur_pos] = { buffer: last_sect, refs: 0 };
vol.partial_sectors[cur_pos].refs++;
sector_refs.push(cur_pos);
}
}
write_size -= remain;
if (write_size < 0)
write_size = 0;
for (let i = write_chunks.length-1; i >= 0 && remain > 0; i--)
{
if (write_chunks[i].length <= remain)
{
remain -= write_chunks[i].length;
if (write_all)
write_chunks[i].copy(last_sect, remain);
else
cur_chunks.unshift(write_chunks[i]);
write_chunks.pop();
}
else
{
if (write_all)
write_chunks[i].copy(last_sect, 0, write_chunks[i].length - remain);
else
cur_chunks.unshift(write_chunks[i].slice(write_chunks[i].length - remain));
write_chunks[i] = write_chunks[i].slice(0, write_chunks[i].length - remain);
remain = 0;
i++;
}
}
if (write_all)
{
write_chunks.push(last_sect);
write_size += this.config.sector_size;
}
}
for (const chunk of cur_chunks)
{
cur_size += chunk.length;
}
return [ write_pos, write_chunks, write_size, cur_pos, cur_size, cur_chunks ];
}
/**
* reqUids: string, // request-ids for log, usually joined by ':'
* keyContext: {
* // a lot of shit, basically all metadata
* bucketName,
* objectKey,
* owner?,
* namespace?,
* partNumber?,
* uploadId?,
* metaHeaders?,
* isDeleteMarker?,
* tagging?,
* contentType?,
* cacheControl?,
* contentDisposition?,
* contentEncoding?,
* },
* callback: (error, objectGetInfo: any) => void,
*/
put(stream, size, keyContext, reqUids, callback)
{
callback = once(callback);
this._getVolume(keyContext.bucketName, size)
.then(vol => this._put(vol, stream, size, keyContext, reqUids, callback))
.catch(callback);
}
_put(vol, stream, size, keyContext, reqUids, callback)
{
const object_header: ObjectHeader = {
size,
key: keyContext.objectKey,
};
if (keyContext.partNumber)
{
object_header.part_num = keyContext.partNumber;
}
// header is: <8 bytes magic> <8 bytes flags> <8 bytes json length> <json>
const hdr_begin_buf = Buffer.alloc(24);
const hdr_json_buf = Buffer.from(JSON.stringify(object_header), 'utf-8');
hdr_begin_buf.write(OBJECT_MAGIC);
hdr_begin_buf.writeBigInt64LE(BigInt(hdr_json_buf.length), 16);
const object_header_buf = Buffer.concat([ hdr_begin_buf, hdr_json_buf ]);
const object_pos = vol.header.size;
const object_get_info = { volume: vol.id, offset: object_pos, hdrlen: object_header_buf.length, size };
let cur_pos = object_pos;
let cur_chunks = [ object_header_buf ];
let cur_size = object_header_buf.length;
let err: Error|null = null;
let waiting = 1; // 1 for end or error, 1 for each write request
vol.header.size += object_header_buf.length + size;
if (!this.config.pack_objects && (vol.header.size % this.config.sector_size))
{
vol.header.size += this.config.sector_size - (vol.header.size % this.config.sector_size);
}
const writeChunk = (last) =>
{
const sector_refs = [];
// Handle partial beginning
[ cur_pos, cur_size ] = this._bufferStart(vol, cur_pos, cur_size, cur_chunks, sector_refs);
// Handle partial end
let write_pos, write_chunks, write_size;
[ write_pos, write_chunks, write_size, cur_pos, cur_size, cur_chunks ] = this._bufferEnd(vol, cur_pos, cur_size, cur_chunks, sector_refs, last);
waiting++;
// FIXME: pool_id: maybe it should be stored in volume metadata to allow to migrate volumes?
this.cli.write(this.config.pool_id, vol.id, write_pos, write_chunks, (res) =>
{
for (const sect of sector_refs)
{
vol.partial_sectors[sect].refs--;
if (!vol.partial_sectors[sect].refs &&
vol.header.size >= sect+this.config.sector_size)
{
// Forget partial data when it's not needed anymore
delete(vol.partial_sectors[sect]);
}
}
waiting--;
if (res)
{
err = new Error(res);
waiting--;
}
if (!waiting)
{
callback(err, err ? null : object_get_info);
}
});
};
// Stream data
stream.on('error', (e) =>
{
err = e;
waiting--;
if (!waiting)
{
callback(err, null);
}
});
stream.on('end', () =>
{
if (err)
{
return;
}
waiting--;
if (cur_size)
{
// write last chunk
writeChunk(true);
}
if (!waiting)
{
callback(null, object_get_info);
}
});
stream.on('data', (chunk) =>
{
if (err)
{
return;
}
cur_chunks.push(chunk);
cur_size += chunk.length;
if (cur_size >= this.config.write_chunk_size)
{
// got a complete chunk, write it out
writeChunk(false);
}
});
}
/**
* objectGetInfo: {
* key: { volume, offset, hdrlen, size }, // from put
* size,
* start,
* dataStoreName,
* dataStoreETag,
* range,
* response: ServerResponse,
* },
* range?: [ start, end ], // like in HTTP - first byte index, last byte index
* callback: (error, readStream) => void,
*/
get(objectGetInfo, range, reqUids, callback)
{
if (!(objectGetInfo instanceof Object) || !objectGetInfo.key ||
!(objectGetInfo.key instanceof Object) || !objectGetInfo.key.volume ||
!objectGetInfo.key.offset || !objectGetInfo.key.hdrlen || !objectGetInfo.key.size)
{
throw new Error('objectGetInfo must be { key: { volume, offset, hdrlen, size } }, but is '+JSON.stringify(objectGetInfo));
}
const [ start, end ] = range || [];
if (start < 0 || end < 0 || end != null && start != null && end < start || start >= objectGetInfo.key.size)
{
throw new Error('Invalid range: '+start+'-'+end);
}
let offset = objectGetInfo.key.offset + objectGetInfo.key.hdrlen + (start || 0);
let len = objectGetInfo.key.size - (start || 0);
if (end)
{
const len2 = end - (start || 0) + 1;
if (len2 < len)
len = len2;
}
callback(null, new VitastorReadStream(this.cli, objectGetInfo.key.volume, offset, len, this.config));
}
/**
* objectGetInfo: {
* key: { volume, offset, hdrlen, size }, // from put
* size,
* start,
* dataStoreName,
* dataStoreETag,
* range,
* response: ServerResponse,
* },
* callback: (error) => void,
*/
delete(objectGetInfo, reqUids, callback)
{
callback = once(callback);
this._delete(objectGetInfo, reqUids)
.then(callback)
.catch(callback);
}
async _delete(objectGetInfo, reqUids)
{
if (!(objectGetInfo instanceof Object) || !objectGetInfo.key ||
!(objectGetInfo.key instanceof Object) || !objectGetInfo.key.volume ||
!objectGetInfo.key.offset || !objectGetInfo.key.hdrlen || !objectGetInfo.key.size)
{
throw new Error('objectGetInfo must be { key: { volume, offset, hdrlen, size } }, but is '+JSON.stringify(objectGetInfo));
}
const in_sect_pos = (objectGetInfo.key.offset % this.config.sector_size);
const sect_pos = objectGetInfo.key.offset - in_sect_pos;
const vol = this.volumes_by_id[objectGetInfo.key.volume];
if (vol && vol.partial_sectors[sect_pos])
{
// The sector may still be written to in corner cases
const sect = vol.partial_sectors[sect_pos];
const flags = sect.buffer.readBigInt64LE(in_sect_pos + 8);
if (!(flags & FLAG_DELETED))
{
const del_stat = this.volume_delete_stats[vol.id] = (this.volume_delete_stats[vol.id] || { count: 0, bytes: 0 });
del_stat.count++;
del_stat.bytes += objectGetInfo.key.size;
sect.buffer.writeBigInt64LE(flags | FLAG_DELETED, in_sect_pos + 8);
sect.refs++;
const err = await new Promise<any>(ok => this.cli.write(this.config.pool_id, objectGetInfo.key.volume, sect_pos, sect.buffer, ok));
sect.refs--;
if (err)
{
sect.buffer.writeBigInt64LE(0n, in_sect_pos + 8);
throw new Error(err);
}
}
}
else
{
// RMW with CAS
const [ err, buf, version ] = await new Promise<[ any, Buffer, bigint ]>(ok => this.cli.read(
this.config.pool_id, objectGetInfo.key.volume, sect_pos, this.config.sector_size,
(err, buf, version) => ok([ err, buf, version ])
));
if (err)
{
throw new Error(err);
}
// FIXME What if JSON crosses sector boundary? Prevent it if we want to pack objects
const magic = buf.slice(in_sect_pos, in_sect_pos+8).toString();
const flags = buf.readBigInt64LE(in_sect_pos+8);
const json_len = Number(buf.readBigInt64LE(in_sect_pos+16));
let json_hdr;
if (in_sect_pos+24+json_len <= buf.length)
{
try
{
json_hdr = JSON.parse(buf.slice(in_sect_pos+24, in_sect_pos+24+json_len).toString());
}
catch (e)
{
}
}
if (magic !== OBJECT_MAGIC || !json_hdr || json_hdr.size !== objectGetInfo.key.size)
{
throw new Error(
'header of object with size '+objectGetInfo.key.size+
' bytes not found in volume '+objectGetInfo.key.volume+' at '+objectGetInfo.key.offset
);
}
else if (!(flags & FLAG_DELETED))
{
buf.writeBigInt64LE(flags | FLAG_DELETED, in_sect_pos + 8);
const err = await new Promise<any>(ok => this.cli.write(this.config.pool_id, objectGetInfo.key.volume, sect_pos, buf, { version: version+1n }, ok));
if (err == vitastor.EINTR)
{
// Retry
await this._delete(objectGetInfo, reqUids);
}
else if (err)
{
throw new Error(err);
}
else
{
// FIXME: Write deletion statistics to volumes
// FIXME: Implement defragmentation
const del_stat = this.volume_delete_stats[objectGetInfo.key.volume] = (this.volume_delete_stats[objectGetInfo.key.volume] || { count: 0, bytes: 0 });
del_stat.count++;
del_stat.bytes += objectGetInfo.key.size;
}
}
}
}
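// Hedged sketch (not part of the original file): how one object header could
// be decoded, mirroring the layout written by _put() above
// (<8B magic> <8B flags> <8B JSON length> <JSON>); the offsets match the
// reads performed in _delete().
//
//   function decodeObjectHeader(buf, pos)
//   {
//       const magic = buf.slice(pos, pos+8).toString();
//       const flags = buf.readBigInt64LE(pos+8);
//       const json_len = Number(buf.readBigInt64LE(pos+16));
//       const hdr = JSON.parse(buf.slice(pos+24, pos+24+json_len).toString());
//       return { magic, deleted: !!(flags & FLAG_DELETED), hdr };
//   }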
/**
* config: full zenko server config,
* callback: (error, stats) => void, // stats is the returned statistics in arbitrary format
*/
getDiskUsage(config, reqUids, callback)
{
// FIXME: Iterate all volumes and return its sizes and deletion statistics, or maybe just sizes
callback(null, {});
}
}
class VitastorReadStream extends stream.Readable
{
constructor(cli, volume_id, offset, len, config, options = undefined)
{
super(options);
this.cli = cli;
this.volume_id = volume_id;
this.offset = offset;
this.end = offset + len;
this.pos = offset;
this.config = config;
this._reading = false;
}
_read(n)
{
if (this._reading)
{
return;
}
// FIXME: Validate object header
const chunk_size = n && this.config.read_chunk_size < n ? n : this.config.read_chunk_size;
const read_offset = this.pos;
const round_offset = read_offset - (read_offset % this.config.sector_size);
let read_end = this.end <= read_offset+chunk_size ? this.end : read_offset+chunk_size;
const round_end = (read_end % this.config.sector_size)
? read_end + this.config.sector_size - (read_end % this.config.sector_size)
: read_end;
if (round_end <= this.end)
read_end = round_end;
this.pos = read_end;
if (read_end <= read_offset)
{
// EOF
this.push(null);
return;
}
this._reading = true;
this.cli.read(this.config.pool_id, this.volume_id, round_offset, round_end-round_offset, (err, buf, version) =>
{
this._reading = false;
if (err)
{
this.destroy(new Error(err));
return;
}
if (read_offset != round_offset || round_end != read_end)
{
buf = buf.subarray(read_offset-round_offset, buf.length-(round_end-read_end));
}
if (this.push(buf))
{
this._read(n);
}
});
}
}
function once(callback)
{
let called = false;
return function()
{
if (!called)
{
called = true;
callback.apply(null, arguments);
}
};
}
module.exports = VitastorBackend;

View File

@@ -177,42 +177,6 @@ class MetadataWrapper {
        });
    }
updateBucketCapabilities(bucketName, bucketMD, capabilityName, capacityField, capability, log, cb) {
log.debug('updating bucket capabilities in metadata');
// When concurrency update is not supported, we update the whole bucket metadata
if (!this.client.putBucketAttributesCapabilities) {
return this.updateBucket(bucketName, bucketMD, log, cb);
}
return this.client.putBucketAttributesCapabilities(bucketName, capabilityName, capacityField, capability,
log, err => {
if (err) {
log.debug('error from metadata', { implName: this.implName,
error: err });
return cb(err);
}
log.trace('bucket capabilities updated in metadata');
return cb(err);
});
}
deleteBucketCapabilities(bucketName, bucketMD, capabilityName, capacityField, log, cb) {
log.debug('deleting bucket capabilities in metadata');
// When concurrency update is not supported, we update the whole bucket metadata
if (!this.client.deleteBucketAttributesCapability) {
return this.updateBucket(bucketName, bucketMD, log, cb);
}
return this.client.deleteBucketAttributesCapability(bucketName, capabilityName, capacityField,
log, err => {
if (err) {
log.debug('error from metadata', { implName: this.implName,
error: err });
return cb(err);
}
log.trace('bucket capabilities deleted in metadata');
return cb(err);
});
}
    getBucket(bucketName, log, cb) {
        log.debug('getting bucket from metadata');
        this.client.getBucketAttributes(bucketName, log, (err, data) => {
@@ -226,19 +190,6 @@ class MetadataWrapper {
        });
    }
getBucketQuota(bucketName, log, cb) {
log.debug('getting bucket quota from metadata');
this.client.getBucketAttributes(bucketName, log, (err, data) => {
if (err) {
log.debug('error from metadata', { implName: this.implName,
error: err });
return cb(err);
}
const bucketInfo = BucketInfo.fromObj(data);
return cb(err, { quota: bucketInfo.getQuota() });
});
}
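    // Illustrative call (not from the diff); the bucket name is made up:
    //   metadata.getBucketQuota('example-bucket', log, (err, res) => {
    //       // res.quota comes from BucketInfo.getQuota()
    //   });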
    deleteBucket(bucketName, log, cb) {
        log.debug('deleting bucket from metadata');
        this.client.deleteBucket(bucketName, log, err => {
@@ -324,7 +275,7 @@ class MetadataWrapper {
        });
    }

-   deleteObjectMD(bucketName, objName, params, log, cb, originOp = 's3:ObjectRemoved:Delete') {
+   deleteObjectMD(bucketName, objName, params, log, cb) {
        log.debug('deleting object from metadata');
        this.client.deleteObject(bucketName, objName, params, log, err => {
            if (err) {
@@ -334,7 +285,7 @@ class MetadataWrapper {
            }
            log.debug('object deleted from metadata');
            return cb(err);
-       }, originOp);
+       });
    }

    listObject(bucketName, listingParams, log, cb) {
@@ -548,139 +499,6 @@ class MetadataWrapper {
            return cb();
        });
    }
/**
* Put bucket indexes
*
* indexSpec format:
* [
* { key:[ { key: "", order: 1 } ... ], name: <id 1>, ... , < backend options> },
* ...
* { key:[ { key: "", order: 1 } ... ], name: <id n>, ... },
* ]
*
*
* @param {String} bucketName bucket name
* @param {Array<Object>} indexSpecs index specification
* @param {Object} log logger
* @param {Function} cb callback
* @return {undefined}
*/
putBucketIndexes(bucketName, indexSpecs, log, cb) {
log.debug('put bucket indexes');
if (typeof this.client.putBucketIndexes !== 'function') {
log.error('error from metadata', {
method: 'putBucketIndexes',
error: errors.NotImplemented,
implName: this.implName,
});
return cb(errors.NotImplemented);
}
return this.client.putBucketIndexes(bucketName, indexSpecs, log, err => {
if (err) {
log.debug('error from metadata', {
method: 'putBucketIndexes',
error: err,
implName: this.implName,
});
return cb(err);
}
return cb(null);
});
}
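    // Illustrative call (not from the diff), following the indexSpec format
    // documented above; bucket and key names are made up:
    //
    //   metadata.putBucketIndexes('example-bucket', [
    //       { name: 'by-color', key: [{ key: 'value.tags.color', order: 1 }] },
    //   ], log, err => { /* ... */ });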
/**
* Delete bucket indexes
*
* indexSpec format:
* [
* { key:[ { key: "", order: 1 } ... ], name: <id 1>, ... , < backend options> },
* ...
* { key:[ { key: "", order: 1 } ... ], name: <id n>, ... },
* ]
*
*
* @param {String} bucketName bucket name
* @param {Array<Object>} indexSpecs index specification
* @param {Object} log logger
* @param {Function} cb callback
* @return {undefined}
*/
deleteBucketIndexes(bucketName, indexSpecs, log, cb) {
log.debug('delete bucket indexes');
if (typeof this.client.deleteBucketIndexes !== 'function') {
log.error('error from metadata', {
method: 'deleteBucketIndexes',
error: errors.NotImplemented,
implName: this.implName,
});
return cb(errors.NotImplemented);
}
return this.client.deleteBucketIndexes(bucketName, indexSpecs, log, err => {
if (err) {
log.error('error from metadata', {
method: 'deleteBucketIndexes',
error: err,
implName: this.implName,
});
return cb(err);
}
return cb(null);
});
}
getBucketIndexes(bucketName, log, cb) {
log.debug('get bucket indexes');
if (typeof this.client.getBucketIndexes !== 'function') {
log.debug('error from metadata', {
method: 'getBucketIndexes',
error: errors.NotImplemented,
implName: this.implName,
});
return cb(errors.NotImplemented);
}
return this.client.getBucketIndexes(bucketName, log, (err, res) => {
if (err) {
log.debug('error from metadata', {
method: 'getBucketIndexes',
error: err,
implName: this.implName,
});
return cb(err);
}
return cb(null, res);
});
}
getIndexingJobs(log, cb) {
if (typeof this.client.getIndexingJobs !== 'function') {
log.debug('error from metadata', {
method: 'getIndexingJobs',
error: errors.NotImplemented,
implName: this.implName,
});
return cb(errors.NotImplemented);
}
return this.client.getIndexingJobs(log, (err, res) => {
if (err) {
log.debug('error from metadata', {
method: 'getBucketIndexes',
error: err,
implName: this.implName,
});
return cb(err);
}
return cb(null, res);
});
}
}

module.exports = MetadataWrapper;

View File

@@ -108,26 +108,9 @@ class ListRecordStream extends stream.Readable {
            if (value && value.tags) {
                value.tags = unescape(value.tags);
            }
-           // updates overwrite the whole metadata,
-           // so they are considered as puts
-           let type = 'put';
-           // When the object metadata contain the "deleted"
-           // flag, it means that the operation is the update
-           // we perform before the deletion of an object. We
-           // perform the update to keep all the metadata in the
-           // oplog. This update is what will be used by backbeat
-           // as the delete operation so we put the type of operation
-           // for this event to a delete.
-           // Backbeat still receives the actual delete operations
-           // but they are ignored as they don't contain any metadata.
-           // The delete operations are kept in case we want to listen
-           // to delete events coming from special collections other
-           // than "bucket" collections.
-           if (value && value.deleted) {
-               type = 'delete';
-           }
            entry = {
-               type,
+               type: 'put', // updates overwrite the whole metadata,
+                            // so they are considered as puts
                key: itemObj.o2._id,
                // updated value may be either stored directly in 'o'
                // attribute or in '$set' attribute (supposedly when

File diff suppressed because it is too large

View File

@@ -85,8 +85,7 @@ class MongoReadStream extends Readable {
            Object.assign(query, searchOptions);
        }

-       const projection = { 'value.location': 0 };
-       this._cursor = c.find(query, { projection }).sort({
+       this._cursor = c.find(query).sort({
            _id: options.reverse ? -1 : 1,
        });
        if (options.limit && options.limit !== -1) {
@@ -102,10 +101,15 @@
            return;
        }

-       this._cursor.next().then(doc => {
+       this._cursor.next((err, doc) => {
            if (this._destroyed) {
                return;
            }
+           if (err) {
+               this.emit('error', err);
+               return;
+           }

            let key = undefined;
            let value = undefined;
@@ -129,12 +133,6 @@
                value,
            });
        }
-       }).catch(err => {
-           if (this._destroyed) {
-               return;
-           }
-           this.emit('error', err);
-           return;
        });
    }
@@ -144,7 +142,7 @@
        }
        this._destroyed = true;

-       this._cursor.close().catch(err => {
+       this._cursor.close(err => {
            if (err) {
                this.emit('error', err);
                return;

View File

@@ -185,48 +185,6 @@ function formatVersionKey(key, versionId, vFormat) {
    return formatVersionKeyV0(key, versionId);
}

-function indexFormatMongoArrayToObject(mongoIndexArray) {
-    const indexObj = [];
-    for (const idx of mongoIndexArray) {
-        const keys = [];
-        let entries = [];
-        if (idx.key instanceof Map) {
-            entries = idx.key.entries();
-        } else {
-            entries = Object.entries(idx.key);
-        }
-        for (const k of entries) {
-            keys.push({ key: k[0], order: k[1] });
-        }
-        indexObj.push({ name: idx.name, keys });
-    }
-    return indexObj;
-}
-
-function indexFormatObjectToMongoArray(indexObj) {
-    const mongoIndexArray = [];
-    for (const idx of indexObj) {
-        const key = new Map();
-        for (const k of idx.keys) {
-            key.set(k.key, k.order);
-        }
-        // copy all fields except keys from idx
-        // eslint-disable-next-line
-        const { keys: _, ...toCopy } = idx;
-        mongoIndexArray.push(Object.assign(toCopy, { name: idx.name, key }));
-    }
-    return mongoIndexArray;
-}
-
module.exports = {
    credPrefix,
@@ -237,6 +195,4 @@ module.exports = {
    translateConditions,
    formatMasterKey,
    formatVersionKey,
-   indexFormatMongoArrayToObject,
-   indexFormatObjectToMongoArray,
};

View File

@@ -29,4 +29,5 @@ server.start(() => {
    logger.info('Metadata Proxy Server successfully started. ' +
        `Using the ${metadataWrapper.implName} backend`);
});
```

View File

@@ -10,21 +10,21 @@ function trySetDirSyncFlag(path) {
    const GETFLAGS = 2148034049;
    const SETFLAGS = 1074292226;
-   const FS_DIRSYNC_FL = 65536n;
+   const FS_DIRSYNC_FL = 65536;
    const buffer = Buffer.alloc(8, 0);
    const pathFD = fs.openSync(path, 'r');
    const status = ioctl(pathFD, GETFLAGS, buffer);
    assert.strictEqual(status, 0);
-   const currentFlags = buffer.readBigInt64LE(0);
+   const currentFlags = buffer.readUIntLE(0, 8);
    const flags = currentFlags | FS_DIRSYNC_FL;
-   buffer.writeBigInt64LE(flags, 0);
+   buffer.writeUIntLE(flags, 0, 8);
    const status2 = ioctl(pathFD, SETFLAGS, buffer);
    assert.strictEqual(status2, 0);
    fs.closeSync(pathFD);
    const pathFD2 = fs.openSync(path, 'r');
    const confirmBuffer = Buffer.alloc(8, 0);
    ioctl(pathFD2, GETFLAGS, confirmBuffer);
-   assert.strictEqual(confirmBuffer.readBigInt64LE(0),
+   assert.strictEqual(confirmBuffer.readUIntLE(0, 8),
        currentFlags | FS_DIRSYNC_FL, 'FS_DIRSYNC_FL not set');
    fs.closeSync(pathFD2);
}
View File

@@ -120,8 +120,8 @@ export function generateVersionId(info: string, replicationGroupId: string): str
    lastSeq = lastTimestamp === ts ? lastSeq + 1 : 0;
    lastTimestamp = ts;

-   // if S3_VERSION_ID_ENCODING_TYPE is "hex", info is used.
-   if (process.env.S3_VERSION_ID_ENCODING_TYPE === 'hex' || !process.env.S3_VERSION_ID_ENCODING_TYPE) {
+   // if S3_VERSION_ID_ENCODING_TYPE is "hex", info is used. By default, it is not used.
+   if (process.env.S3_VERSION_ID_ENCODING_TYPE === 'hex') {
        // info field stays as is
    } else {
        info = ''; // eslint-disable-line
View File

@@ -3,54 +3,54 @@
  "engines": {
    "node": ">=16"
  },
- "version": "8.1.134",
+ "version": "7.70.31",
  "description": "Common utilities for the S3 project components",
  "main": "build/index.js",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/scality/Arsenal.git"
  },
- "author": "Scality Inc.",
+ "author": "Giorgio Regni",
  "license": "Apache-2.0",
  "bugs": {
    "url": "https://github.com/scality/Arsenal/issues"
  },
  "homepage": "https://github.com/scality/Arsenal#readme",
  "dependencies": {
-   "@azure/identity": "^3.1.1",
-   "@azure/storage-blob": "^12.12.0",
-   "@js-sdsl/ordered-set": "^4.4.2",
-   "@swc/cli": "^0.4.0",
-   "@swc/core": "^1.7.4",
+   "@js-sdsl/ordered-set": "^4.4.2",
+   "@types/async": "^3.2.12",
+   "@types/utf8": "^3.0.1",
+   "JSONStream": "^1.0.0",
    "agentkeepalive": "^4.1.3",
-   "ajv": "^6.12.3",
-   "async": "^2.6.4",
+   "ajv": "6.12.2",
+   "async": "~2.1.5",
    "aws-sdk": "^2.1005.0",
+   "azure-storage": "~2.10.7",
    "backo": "^1.1.0",
-   "base-x": "^3.0.8",
-   "base62": "^2.0.1",
-   "bson": "^4.0.0",
-   "debug": "^4.1.0",
+   "base-x": "3.0.8",
+   "base62": "2.0.1",
+   "bson": "4.0.0",
+   "debug": "~2.6.9",
    "diskusage": "^1.1.1",
-   "fcntl": "git+https://git.yourcmc.ru/vitalif/zenko-fcntl.git",
-   "httpagent": "git+https://git.yourcmc.ru/vitalif/zenko-httpagent.git#development/1.0",
+   "fcntl": "github:scality/node-fcntl#0.2.2",
+   "hdclient": "scality/hdclient#1.1.0",
    "https-proxy-agent": "^2.2.0",
    "ioredis": "^4.28.5",
-   "ipaddr.js": "^1.9.1",
+   "ipaddr.js": "1.9.1",
    "joi": "^17.6.0",
-   "JSONStream": "^1.0.0",
-   "level": "^5.0.1",
-   "level-sublevel": "^6.6.5",
-   "mongodb": "^5.2.0",
-   "node-forge": "^1.3.0",
-   "prom-client": "^14.2.0",
-   "simple-glob": "^0.2.0",
-   "socket.io": "^4.6.1",
-   "socket.io-client": "^4.6.1",
-   "utf8": "^3.0.0",
+   "level": "~5.0.1",
+   "level-sublevel": "~6.6.5",
+   "mongodb": "^3.0.1",
+   "node-forge": "^0.7.1",
+   "prom-client": "14.2.0",
+   "simple-glob": "^0.2",
+   "socket.io": "~4.6.1",
+   "socket.io-client": "~4.6.1",
+   "sproxydclient": "github:scality/sproxydclient#8.0.4",
+   "utf8": "2.1.2",
    "uuid": "^3.0.1",
-   "werelogs": "git+https://git.yourcmc.ru/vitalif/zenko-werelogs.git#development/8.1",
-   "xml2js": "^0.4.23"
+   "werelogs": "scality/werelogs#8.1.4",
+   "xml2js": "~0.4.23"
  },
  "optionalDependencies": {
    "ioctl": "^2.0.2"
@@ -59,24 +59,22 @@
    "@babel/preset-env": "^7.16.11",
    "@babel/preset-typescript": "^7.16.7",
    "@sinonjs/fake-timers": "^6.0.1",
-   "@types/async": "^3.2.12",
-   "@types/utf8": "^3.0.1",
    "@types/ioredis": "^4.28.10",
    "@types/jest": "^27.4.1",
-   "@types/node": "^18.19.41",
+   "@types/node": "^17.0.21",
    "@types/xml2js": "^0.4.11",
-   "eslint": "^8.14.0",
-   "eslint-config-airbnb-base": "^15.0.0",
-   "eslint-config-scality": "git+https://git.yourcmc.ru/vitalif/zenko-eslint-config-scality.git",
+   "eslint": "^8.12.0",
+   "eslint-config-airbnb": "6.2.0",
+   "eslint-config-scality": "scality/Guidelines#7.10.2",
    "eslint-plugin-react": "^4.3.0",
    "jest": "^27.5.1",
-   "mongodb-memory-server": "^8.12.2",
-   "nyc": "^15.1.0",
+   "mocha": "8.0.1",
+   "mongodb-memory-server": "^6.0.2",
    "sinon": "^9.0.2",
-   "temp": "^0.9.1",
+   "temp": "0.9.1",
    "ts-jest": "^27.1.3",
    "ts-node": "^10.6.0",
-   "typescript": "^4.9.5"
+   "typescript": "^4.6.2"
  },
  "scripts": {
    "lint": "eslint $(git ls-files '*.js')",
@@ -84,28 +82,18 @@
    "lint_yml": "yamllint $(git ls-files '*.yml')",
    "test": "jest tests/unit",
    "build": "tsc",
-   "prepack": "tsc",
-   "postinstall": "[ -d build ] || swc -d build --copy-files package.json index.ts lib",
+   "prepare": "yarn build",
    "ft_test": "jest tests/functional --testTimeout=120000 --forceExit",
-   "coverage": "nyc --clean jest tests --coverage --testTimeout=120000 --forceExit",
    "build_doc": "cd documentation/listingAlgos/pics; dot -Tsvg delimiterStateChart.dot > delimiterStateChart.svg; dot -Tsvg delimiterMasterV0StateChart.dot > delimiterMasterV0StateChart.svg; dot -Tsvg delimiterVersionsStateChart.dot > delimiterVersionsStateChart.svg"
  },
  "private": true,
  "jest": {
    "maxWorkers": 1,
-   "coverageReporters": [
-     "json"
-   ],
    "collectCoverageFrom": [
      "lib/**/*.{js,ts}",
      "index.js"
    ],
    "preset": "ts-jest",
-   "testEnvironment": "node",
-   "transform": {
-     "^.\\.ts?$": "ts-jest"
-   },
-   "transformIgnorePatterns": [],
    "globals": {
      "test-jest": {
        "diagnostics": {
@@ -113,12 +101,5 @@
        }
      }
    }
- },
- "nyc": {
-   "tempDirectory": "coverage",
-   "reporter": [
-     "lcov",
-     "text"
-   ]
  }
}

View File

@@ -1,501 +0,0 @@
const async = require('async');
const assert = require('assert');
const sinon = require('sinon');
const werelogs = require('werelogs');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
const { errors, versioning } = require('../../../../index');
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
const BucketInfo = require('../../../../lib/models/BucketInfo').default;
const MetadataWrapper =
require('../../../../lib/storage/metadata/MetadataWrapper');
const genVID = require('../../../../lib/versioning/VersionID').generateVersionId;
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
const IMPL_NAME = 'mongodb';
const DB_NAME = 'metadata';
const BUCKET_NAME = 'test-bucket';
const replicationGroupId = 'RG001';
const mongoserver = new MongoMemoryReplSet({
debug: false,
instanceOpts: [
{ port: 27018 },
],
replSet: {
name: 'rs0',
count: 1,
DB_NAME,
storageEngine: 'ephemeralForTest',
},
});
let uidCounter = 0;
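// the pid plus a process-local counter make the version-id "info" seed
// unique for every call in this test run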
function generateVersionId() {
return genVID(`${process.pid}.${uidCounter++}`,
replicationGroupId);
}
const variations = [
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0 },
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1 },
];
describe('MongoClientInterface::metadata.deleteObjectMD', () => {
let metadata;
let collection;
function getObjectCount(cb) {
collection.countDocuments()
.then(count => cb(null, count))
.catch(err => cb(err));
}
function getObject(key, cb) {
collection.findOne({
_id: key,
}, {}).then(doc => {
if (!doc) {
return cb(errors.NoSuchKey);
}
return cb(null, doc.value);
}).catch(err => cb(err));
}
beforeAll(done => {
mongoserver.start().then(() => {
mongoserver.waitUntilRunning().then(() => {
const opts = {
mongodb: {
replicaSetHosts: 'localhost:27018',
writeConcern: 'majority',
replicaSet: 'rs0',
readPreference: 'primary',
database: DB_NAME,
},
};
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
metadata.setup(done);
});
});
});
afterAll(done => {
async.series([
next => metadata.close(next),
next => mongoserver.stop()
.then(() => next())
.catch(next),
], done);
});
variations.forEach(variation => {
const itOnlyInV1 = variation.vFormat === 'v1' ? it : it.skip;
describe(`vFormat : ${variation.vFormat}`, () => {
beforeEach(done => {
const bucketMD = BucketInfo.fromObj({
_name: BUCKET_NAME,
_owner: 'testowner',
_ownerDisplayName: 'testdisplayname',
_creationDate: new Date().toJSON(),
_acl: {
Canned: 'private',
FULL_CONTROL: [],
WRITE: [],
WRITE_ACP: [],
READ: [],
READ_ACP: [],
},
_mdBucketModelVersion: 10,
_transient: false,
_deleted: false,
_serverSideEncryption: null,
_versioningConfiguration: null,
_locationConstraint: 'us-east-1',
_readLocationConstraint: null,
_cors: null,
_replicationConfiguration: null,
_lifecycleConfiguration: null,
_uid: '',
_isNFS: null,
ingestion: null,
});
async.series([
next => {
metadata.client.defaultBucketKeyFormat = variation.vFormat;
return next();
},
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
if (err) {
return next(err);
}
collection = metadata.client.getCollection(BUCKET_NAME);
return next();
}),
], done);
});
afterEach(done => {
metadata.deleteBucket(BUCKET_NAME, logger, done);
});
it(`Should delete non versioned object ${variation.vFormat}`, done => {
const params = {
objName: 'non-deleted-object',
objVal: {
key: 'non-deleted-object',
versionId: 'null',
},
};
const versionParams = {
versioning: false,
versionId: null,
repairMaster: null,
};
return async.series([
next => {
// we put the master version of object
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, next);
},
next => {
// we put the master version of a second object
params.objName = 'object-to-delete';
params.objVal.key = 'object-to-delete';
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, next);
},
next => {
// We delete the first object
metadata.deleteObjectMD(BUCKET_NAME, params.objName, null, logger, next);
},
next => {
// Object must be removed
metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return next();
});
},
next => {
// only 1 object remaining in db
getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 1);
return next();
});
},
], done);
});
it(`Should not throw error when object non existent ${variation.vFormat}`, done => {
const objName = 'non-existent-object';
metadata.deleteObjectMD(BUCKET_NAME, objName, null, logger, err => {
assert.deepStrictEqual(err, null);
return done();
});
});
it(`Should not throw error when bucket non existent ${variation.vFormat}`, done => {
const objName = 'non-existent-object';
metadata.deleteObjectMD(BUCKET_NAME, objName, null, logger, err => {
assert.deepStrictEqual(err, null);
return done();
});
});
it(`Master should not be updated when non latest version is deleted ${variation.vFormat}`, done => {
let versionId1 = null;
const params = {
objName: 'test-object',
objVal: {
key: 'test-object',
versionId: 'null',
},
vFormat: 'v0',
};
const versionParams = {
versioning: true,
versionId: null,
repairMaster: null,
};
return async.series([
next => {
// we start by creating a new version and master
versionId1 = generateVersionId(this.replicationGroupId);
params.versionId = versionId1;
params.objVal.versionId = versionId1;
versionParams.versionId = versionId1;
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, next);
},
next => {
// we create a second version of the same object (master is updated)
params.objVal.versionId = 'version2';
versionParams.versionId = null;
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, next);
},
next => {
// we delete the first version
metadata.deleteObjectMD(BUCKET_NAME, params.objName, { versionId: versionId1 },
logger, next);
},
next => {
// the first version should no longer be available
metadata.getObjectMD(BUCKET_NAME, params.objName, { versionId: versionId1 }, logger, err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return next();
});
},
next => {
// master must be containing second version metadata
metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.notStrictEqual(data.versionId, versionId1);
return next();
});
},
next => {
// master and one version remaining in db
getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 2);
return next();
});
},
], done);
});
it(`Master should be updated when last version is deleted ${variation.vFormat}`, done => {
let versionId1;
let versionId2;
const params = {
objName: 'test-object',
objVal: {
key: 'test-object',
versionId: 'null',
isLast: false,
},
};
const versionParams = {
versioning: true,
versionId: null,
repairMaster: null,
};
return async.series([
next => {
// we start by creating a new version and master
versionId1 = generateVersionId(this.replicationGroupId);
params.versionId = versionId1;
params.objVal.versionId = versionId1;
versionParams.versionId = versionId1;
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, next);
},
next => {
// we create a second version of the same object (master is updated)
versionId2 = generateVersionId(this.replicationGroupId);
params.versionId = versionId2;
params.objVal.versionId = versionId2;
versionParams.versionId = versionId2;
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, next);
},
next => {
// deleting latest version
metadata.deleteObjectMD(BUCKET_NAME, params.objName, { versionId: versionId2 },
logger, next);
},
next => {
// latest version must be removed
metadata.getObjectMD(BUCKET_NAME, params.objName, { versionId: versionId2 }, logger, err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return next();
});
},
next => {
// master must be updated to contain first version data
metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.versionId, versionId1);
return next();
});
},
next => {
// one master and version in the db
getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 2);
return next();
});
},
], done);
});
it(`Should fail when version id is non-existent ${variation.vFormat}`, done => {
const versionId = generateVersionId(this.replicationGroupId);
const objName = 'test-object';
metadata.deleteObjectMD(BUCKET_NAME, objName, { versionId }, logger, err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return done();
});
});
itOnlyInV1(`Should create master when delete marker removed ${variation.vFormat}`, done => {
const objVal = {
key: 'test-object',
isDeleteMarker: false,
};
const params = {
versioning: true,
versionId: null,
repairMaster: null,
};
let firstVersionVersionId;
let deleteMarkerVersionId;
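// removing the latest version is expected to leave a PHD (placeholder)
// master behind; an asynchronous repair, scheduled with setTimeout, then
// promotes the newest remaining version back to master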
async.series([
// We first create a new version and master
next => metadata.putObjectMD(BUCKET_NAME, 'test-object', objVal, params, logger, (err, res) => {
if (err) {
return next(err);
}
firstVersionVersionId = JSON.parse(res).versionId;
return next();
}),
// putting a delete marker as last version
next => {
objVal.isDeleteMarker = true;
params.versionId = null;
return metadata.putObjectMD(BUCKET_NAME, 'test-object', objVal, params, logger, (err, res) => {
if (err) {
return next(err);
}
deleteMarkerVersionId = JSON.parse(res).versionId;
return next();
});
},
next => {
// using fake clock to override the setTimeout used by the repair
const clock = sinon.useFakeTimers();
return metadata.deleteObjectMD(BUCKET_NAME, 'test-object', { versionId: deleteMarkerVersionId },
logger, () => {
// running the repair callback
clock.runAll();
clock.restore();
return next();
});
},
// waiting for the repair callback to finish
next => setTimeout(next, 100),
// master should be created
next => {
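// '\x7fM' is the v1 key-format prefix for master keys (this test only runs in v1)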
getObject('\x7fMtest-object', (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, 'test-object');
assert.strictEqual(object.versionId, firstVersionVersionId);
assert.strictEqual(object.isDeleteMarker, false);
return next();
});
},
], done);
});
itOnlyInV1(`Should delete master when delete marker becomes last version ${variation.vFormat}`, done => {
const objVal = {
key: 'test-object',
isDeleteMarker: false,
};
const params = {
versioning: true,
versionId: null,
repairMaster: null,
};
let versionId;
async.series([
// We first create a new version and master
next => metadata.putObjectMD(BUCKET_NAME, 'test-object', objVal, params, logger, next),
// putting a delete marker as last version
next => {
objVal.isDeleteMarker = true;
params.versionId = null;
return metadata.putObjectMD(BUCKET_NAME, 'test-object', objVal, params, logger, next);
},
// putting new version on top of delete marker
next => {
objVal.isDeleteMarker = false;
return metadata.putObjectMD(BUCKET_NAME, 'test-object', objVal, params, logger, (err, res) => {
if (err) {
return next(err);
}
versionId = JSON.parse(res).versionId;
return next();
});
},
next => {
// using fake clock to override the setTimeout used by the repair
const clock = sinon.useFakeTimers();
return metadata.deleteObjectMD(BUCKET_NAME, 'test-object', { versionId },
logger, () => {
// running the repair callback
clock.runAll();
clock.restore();
return next();
});
},
// waiting for the repair callback to finish
next => setTimeout(next, 100),
// master must be deleted
next => {
getObject('\x7fMtest-object', err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return next();
});
},
], done);
});
it('should delete the object directly if params.doesNotNeedOpogUpdate is true', done => {
const objName = 'object-to-delete';
const objVal = {
key: 'object-to-delete',
versionId: 'null',
};
const versionParams = {
versioning: false,
versionId: null,
repairMaster: null,
};
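// doesNotNeedOpogUpdate is assumed to tell the backend it can delete the
// key directly, skipping the usual oplog/PHD bookkeeping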
async.series([
next => {
metadata.putObjectMD(BUCKET_NAME, objName, objVal, versionParams, logger, next);
},
next => {
metadata.deleteObjectMD(BUCKET_NAME, objName, { doesNotNeedOpogUpdate: true }, logger, next);
},
next => {
metadata.getObjectMD(BUCKET_NAME, objName, null, logger, err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return next();
});
},
next => {
getObjectCount((err, count) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(count, 0);
return next();
});
},
], done);
});
it('should throw an error if params.doesNotNeedOpogUpdate is true and object does not exist', done => {
const objName = 'non-existent-object';
metadata.deleteObjectMD(BUCKET_NAME, objName, { doesNotNeedOpogUpdate: true }, logger, err => {
assert.deepStrictEqual(err, errors.InternalError);
return done();
});
});
});
});
});


@ -1,303 +0,0 @@
const async = require('async');
const assert = require('assert');
const werelogs = require('werelogs');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
const { errors, versioning } = require('../../../../index');
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
const BucketInfo = require('../../../../lib/models/BucketInfo').default;
const MetadataWrapper =
require('../../../../lib/storage/metadata/MetadataWrapper');
const genVID = versioning.VersionID.generateVersionId;
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
const { formatMasterKey } = require('../../../../lib/storage/metadata/mongoclient/utils');
const IMPL_NAME = 'mongodb';
const DB_NAME = 'metadata';
const BUCKET_NAME = 'test-bucket';
const replicationGroupId = 'RG001';
const mongoserver = new MongoMemoryReplSet({
debug: false,
instanceOpts: [
{ port: 27019 },
],
replSet: {
name: 'rs0',
count: 1,
DB_NAME,
storageEngine: 'ephemeralForTest',
},
});
let uidCounter = 0;
function generateVersionId() {
return genVID(`${process.pid}.${uidCounter++}`,
replicationGroupId);
}
const variations = [
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0 },
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1 },
];
describe('MongoClientInterface::metadata.getObjectMD', () => {
let metadata;
let collection;
let versionId1;
let versionId2;
let params = {
objName: 'pfx1-test-object',
objVal: {
key: 'pfx1-test-object',
versionId: 'null',
},
};
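// overwrites the master key unless it already holds a versionId lower than
// or equal to the given one (version ids are assumed to sort so that newer
// versions compare lower); the tests use it to plant a PHD placeholder master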
function updateMasterObject(objName, versionId, objVal, vFormat, cb) {
const mKey = formatMasterKey(objName, vFormat);
collection.updateOne(
{
_id: mKey,
$or: [{
'value.versionId': {
$exists: false,
},
},
{
'value.versionId': {
$gt: versionId,
},
},
],
},
{
$set: { _id: mKey, value: objVal },
},
{ upsert: true }).then(() => cb(null)).catch(err => cb(err));
}
/**
* Sets the "deleted" property to true
* @param {string} key object name
* @param {Function} cb callback
* @return {undefined}
*/
function flagObjectForDeletion(key, cb) {
collection.updateMany(
{ 'value.key': key },
{ $set: { 'value.deleted': true } },
{ upsert: false }).then(() => cb()).catch(err => cb(err));
}
beforeAll(done => {
mongoserver.start().then(() => {
mongoserver.waitUntilRunning().then(() => {
const opts = {
mongodb: {
replicaSetHosts: 'localhost:27019',
writeConcern: 'majority',
replicaSet: 'rs0',
readPreference: 'primary',
database: DB_NAME,
},
};
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
metadata.setup(done);
});
});
});
afterAll(done => {
async.series([
next => metadata.close(next),
next => mongoserver.stop()
.then(() => next())
.catch(next),
], done);
});
variations.forEach(variation => {
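// the master-repair behaviours exercised below are specific to the v1
// key layout, hence the conditional `it`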
const itOnlyInV1 = variation.vFormat === 'v1' ? it : it.skip;
describe(`vFormat : ${variation.vFormat}`, () => {
beforeEach(done => {
const bucketMD = BucketInfo.fromObj({
_name: BUCKET_NAME,
_owner: 'testowner',
_ownerDisplayName: 'testdisplayname',
_creationDate: new Date().toJSON(),
_acl: {
Canned: 'private',
FULL_CONTROL: [],
WRITE: [],
WRITE_ACP: [],
READ: [],
READ_ACP: [],
},
_mdBucketModelVersion: 10,
_transient: false,
_deleted: false,
_serverSideEncryption: null,
_versioningConfiguration: null,
_locationConstraint: 'us-east-1',
_readLocationConstraint: null,
_cors: null,
_replicationConfiguration: null,
_lifecycleConfiguration: null,
_uid: '',
_isNFS: null,
ingestion: null,
});
const versionParams = {
versioning: true,
versionId: null,
repairMaster: null,
};
async.series([
next => {
metadata.client.defaultBucketKeyFormat = variation.vFormat;
return next();
},
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
if (err) {
return next(err);
}
collection = metadata.client.getCollection(BUCKET_NAME);
return next();
}),
next => {
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, (err, res) => {
if (err) {
return next(err);
}
versionId1 = JSON.parse(res).versionId;
return next(null);
});
},
next => {
metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal,
versionParams, logger, (err, res) => {
if (err) {
return next(err);
}
versionId2 = JSON.parse(res).versionId;
return next(null);
});
},
], done);
});
afterEach(done => {
// reset params
params = {
objName: 'pfx1-test-object',
objVal: {
key: 'pfx1-test-object',
versionId: 'null',
},
};
metadata.deleteBucket(BUCKET_NAME, logger, done);
});
it(`Should return latest version of object ${variation.it}`, done =>
metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, params.objName);
assert.strictEqual(object.versionId, versionId2);
return done();
}));
it(`Should return the specified version of object ${variation.it}`, done =>
metadata.getObjectMD(BUCKET_NAME, params.objName, { versionId: versionId1 }, logger, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, params.objName);
assert.strictEqual(object.versionId, versionId1);
return done();
}));
it(`Should throw error when version is non-existent ${variation.it}`, done => {
const versionId = '1234567890';
return metadata.getObjectMD(BUCKET_NAME, params.objName, { versionId }, logger, (err, object) => {
assert.deepStrictEqual(object, undefined);
assert.deepStrictEqual(err, errors.NoSuchKey);
return done();
});
});
it(`Should throw error when object is non-existent ${variation.it}`, done => {
const objName = 'non-existent-object';
return metadata.getObjectMD(BUCKET_NAME, objName, null, logger, err => {
assert.deepStrictEqual(err, errors.NoSuchKey);
return done();
});
});
it(`Should throw error when bucket is non-existent ${variation.it}`, done => {
const bucketName = 'non-existent-bucket';
return metadata.getObjectMD(bucketName, params.objName, null, logger, (err, object) => {
assert.deepStrictEqual(object, undefined);
assert.deepStrictEqual(err, errors.NoSuchKey);
return done();
});
});
it(`Should return latest version when master is PHD ${variation.it}`, done => {
async.series([
next => {
const objectName = variation.vFormat === 'v0' ? 'pfx1-test-object' : '\x7fMpfx1-test-object';
// adding isPHD flag to master
const phdVersionId = generateVersionId();
params.objVal.versionId = phdVersionId;
params.objVal.isPHD = true;
updateMasterObject(objectName, phdVersionId, params.objVal,
variation.vFormat, next);
},
// Should return latest object version
next => metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, params.objName);
assert.strictEqual(object.versionId, versionId2);
delete params.objVal.isPHD;
return next();
}),
], done);
});
it('Should fail to get an object tagged for deletion', done => {
async.series([
next => flagObjectForDeletion(params.objName, next),
next => metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, object) => {
assert.deepStrictEqual(object, undefined);
assert.deepStrictEqual(err, errors.NoSuchKey);
return next();
}),
], done);
});
itOnlyInV1(`Should return last version when master deleted ${variation.vFormat}`, done => {
const versioningParams = {
versioning: true,
versionId: null,
repairMaster: null,
};
async.series([
// putting a delete marker as last version
next => {
params.versionId = null;
params.objVal.isDeleteMarker = true;
return metadata.putObjectMD(BUCKET_NAME, params.objName, params.objVal, versioningParams,
logger, next);
},
next => metadata.getObjectMD(BUCKET_NAME, params.objName, null, logger, (err, object) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(object.key, params.objName);
assert.strictEqual(object.isDeleteMarker, true);
params.objVal.isDeleteMarker = null;
return next();
}),
], done);
});
});
});
});


@ -1,331 +0,0 @@
const async = require('async');
const assert = require('assert');
const werelogs = require('werelogs');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
const { versioning } = require('../../../../index');
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
const BucketInfo = require('../../../../lib/models/BucketInfo').default;
const MetadataWrapper =
require('../../../../lib/storage/metadata/MetadataWrapper');
const genVID = versioning.VersionID.generateVersionId;
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
const { formatMasterKey, formatVersionKey } = require('../../../../lib/storage/metadata/mongoclient/utils');
const IMPL_NAME = 'mongodb';
const DB_NAME = 'metadata';
const BUCKET_NAME = 'test-bucket-batching';
const replicationGroupId = 'RG001';
const N = 10;
const mongoserver = new MongoMemoryReplSet({
debug: false,
instanceOpts: [
{ port: 27019 },
],
replSet: {
name: 'rs0',
count: 1,
DB_NAME,
storageEngine: 'ephemeralForTest',
},
});
let uidCounter = 0;
function generateVersionId() {
return genVID(`${process.pid}.${uidCounter++}`,
replicationGroupId);
}
const variations = [
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0, versioning: false },
{ it: '(v0)', vFormat: BucketVersioningKeyFormat.v0, versioning: true },
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1, versioning: false },
{ it: '(v1)', vFormat: BucketVersioningKeyFormat.v1, versioning: true },
];
describe('MongoClientInterface::metadata.getObjectsMD', () => {
let metadata;
let collection;
let versionId2;
const params = {
key: 'pfx1-test-object',
objVal: {
key: 'pfx1-test-object',
versionId: 'null',
},
};
function updateMasterObject(objName, versionId, objVal, vFormat, cb) {
const mKey = formatMasterKey(objName, vFormat);
collection.updateOne(
{
_id: mKey,
$or: [{
'value.versionId': {
$exists: false,
},
},
{
'value.versionId': {
$gt: versionId,
},
},
],
},
{
$set: { _id: mKey, value: objVal },
},
{ upsert: true }).then(() => cb(null)).catch(err => cb(err));
}
/**
* Sets the "deleted" property to true
* @param {string} key object name
* @param {Function} cb callback
* @return {undefined}
*/
function flagObjectForDeletion(key, cb) {
collection.updateMany(
{ 'value.key': key },
{ $set: { 'value.deleted': true } },
{ upsert: false }).then(() => cb()).catch(err => cb(err));
}
beforeAll(done => {
mongoserver.start().then(() => {
mongoserver.waitUntilRunning().then(() => {
const opts = {
mongodb: {
replicaSetHosts: 'localhost:27019',
writeConcern: 'majority',
replicaSet: 'rs0',
readPreference: 'primary',
database: DB_NAME,
},
};
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
metadata.setup(done);
});
});
});
afterAll(done => {
async.series([
next => metadata.close(next),
next => mongoserver.stop()
.then(() => next())
.catch(next),
], done);
});
variations.forEach(variation => {
const itOnlyInV1 = variation.vFormat === 'v1' && variation.versioning ? it : it.skip;
describe(`vFormat : ${variation.vFormat}, versioning: ${variation.versioning}`, () => {
let paramsArr = [];
beforeEach(done => {
// reset params
paramsArr = Array.from({ length: N }, (_, i) => ({
key: `pfx1-test-object${i + 1}`,
objVal: {
key: `pfx1-test-object${i + 1}`,
versionId: 'null',
},
}));
const bucketMD = BucketInfo.fromObj({
_name: BUCKET_NAME,
_owner: 'testowner',
_ownerDisplayName: 'testdisplayname',
_creationDate: new Date().toJSON(),
_acl: {
Canned: 'private',
FULL_CONTROL: [],
WRITE: [],
WRITE_ACP: [],
READ: [],
READ_ACP: [],
},
_mdBucketModelVersion: 10,
_transient: false,
_deleted: false,
_serverSideEncryption: null,
_versioningConfiguration: null,
_locationConstraint: 'us-east-1',
_readLocationConstraint: null,
_cors: null,
_replicationConfiguration: null,
_lifecycleConfiguration: null,
_uid: '',
_isNFS: null,
ingestion: null,
});
const versionParams = {
versioning: variation.versioning,
versionId: null,
repairMaster: null,
};
async.series([
next => {
metadata.client.defaultBucketKeyFormat = variation.vFormat;
return next();
},
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
if (err) {
return next(err);
}
collection = metadata.client.getCollection(BUCKET_NAME);
return next();
}),
next => {
async.eachSeries(paramsArr, (params, eachCb) => {
metadata.putObjectMD(BUCKET_NAME, params.key, params.objVal,
versionParams, logger, (err, res) => {
if (err) {
return eachCb(err);
}
if (variation.versioning) {
// eslint-disable-next-line no-param-reassign
params.versionId = JSON.parse(res).versionId;
}
return eachCb(null);
});
}, next);
},
next => {
metadata.putObjectMD(BUCKET_NAME, paramsArr[N - 1].key, paramsArr[N - 1].objVal,
versionParams, logger, (err, res) => {
if (err) {
return next(err);
}
if (variation.versioning) {
versionId2 = JSON.parse(res).versionId;
} else {
versionId2 = 'null';
}
return next(null);
});
},
], done);
});
afterEach(done => {
metadata.deleteBucket(BUCKET_NAME, logger, done);
});
it(`should get ${N} object${variation.versioning ? '' : ' master'} versions using batching`, done => {
const request = paramsArr.map(({ key, objVal }) => ({
key,
params: {
versionId: variation.versioning ? objVal.versionId : null,
},
}));
metadata.getObjectsMD(BUCKET_NAME, request, logger, (err, objects) => {
assert.strictEqual(err, null);
assert.strictEqual(objects.length, N);
objects.forEach((obj, i) => {
assert.strictEqual(obj.doc.key, paramsArr[i].key);
if (variation.versioning) {
assert.strictEqual(obj.doc.versionId, paramsArr[i].objVal.versionId);
}
});
return done();
});
});
it('should not throw an error if object or version is non-existent and should return a null doc', done => {
const request = [{
key: 'nonexistent',
params: {
versionId: variation.versioning ? 'nonexistent' : null,
},
}];
metadata.getObjectsMD(BUCKET_NAME, request, logger, (err, objects) => {
assert.strictEqual(err, null);
assert.strictEqual(objects.length, 1);
assert.strictEqual(objects[0].doc, null);
done();
});
});
it(`should return latest version when master is PHD ${variation.it}`, done => {
if (!variation.versioning) {
return done();
}
const request = paramsArr.map(({ key, objVal }) => ({
key,
params: {
versionId: variation.versioning ? objVal.versionId : null,
},
}));
return async.series([
next => {
const objectName = formatMasterKey(paramsArr[N - 1].key, variation.vFormat);
// adding isPHD flag to master
const phdVersionId = generateVersionId();
paramsArr[N - 1].objVal.versionId = phdVersionId;
paramsArr[N - 1].objVal.isPHD = true;
updateMasterObject(objectName, phdVersionId, paramsArr[N - 1].objVal,
variation.vFormat, next);
},
// Should return latest object version
next => metadata.getObjectsMD(BUCKET_NAME, request, logger, (err, objects) => {
assert.deepStrictEqual(err, null);
objects.forEach((obj, i) => {
assert.strictEqual(obj.doc.key, paramsArr[i].objVal.key);
if (variation.versioning && i === N - 1) {
assert.strictEqual(obj.doc.versionId, versionId2);
} else {
assert.strictEqual(obj.doc.versionId, paramsArr[i].objVal.versionId);
}
});
delete paramsArr[N - 1].objVal.isPHD;
return next();
}),
], done);
});
it('should fail to get an object tagged for deletion', done => {
const key = paramsArr[0].key;
flagObjectForDeletion(key, err => {
assert.ifError(err);
metadata.getObjectsMD(BUCKET_NAME, [{ key }], logger, (err, object) => {
assert.strictEqual(err, null);
assert.strictEqual(object[0].doc, null);
done();
});
});
});
itOnlyInV1(`Should return last version when master deleted ${variation.vFormat}`, done => {
const versioningParams = {
versioning: true,
versionId: null,
repairMaster: null,
};
async.series([
// putting a delete marker as last version
next => {
paramsArr[0].versionId = null;
paramsArr[0].objVal.isDeleteMarker = true;
return metadata.putObjectMD(BUCKET_NAME, paramsArr[0].key, paramsArr[0].objVal,
versioningParams, logger, next);
},
next => metadata.getObjectsMD(BUCKET_NAME, [{ key: paramsArr[0].key }], logger, (err, objects) => {
assert.strictEqual(err, null);
assert.strictEqual(objects[0].doc.key, paramsArr[0].key);
assert.strictEqual(objects[0].doc.isDeleteMarker, true);
paramsArr[0].objVal.isDeleteMarker = null;
return next();
}),
], done);
});
});
});
});


@ -1,744 +0,0 @@
const async = require('async');
const assert = require('assert');
const werelogs = require('werelogs');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
const MetadataWrapper =
require('../../../../../lib/storage/metadata/MetadataWrapper');
const { versioning } = require('../../../../../index');
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
const { assertContents, flagObjectForDeletion, makeBucketMD, putBulkObjectVersions } = require('./utils');
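// putBulkObjectVersions (test helper) is assumed to put `nbVersions`
// successive versions of a key, bumping last-modified from the given
// timestamp, and to report the latest version id as `lastVersionId`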
const IMPL_NAME = 'mongodb';
const DB_NAME = 'metadata';
const BUCKET_NAME = 'test-lifecycle-list-current-bucket';
const mongoserver = new MongoMemoryReplSet({
debug: false,
instanceOpts: [
{ port: 27020 },
],
replSet: {
name: 'rs0',
count: 1,
DB_NAME,
storageEngine: 'ephemeralForTest',
},
});
describe('MongoClientInterface::metadata.listLifecycleObject::current', () => {
let metadata;
let collection;
const expectedVersionIds = {};
const location1 = 'loc1';
const location2 = 'loc2';
beforeAll(done => {
mongoserver.start().then(() => {
mongoserver.waitUntilRunning().then(() => {
const opts = {
mongodb: {
replicaSetHosts: 'localhost:27020',
writeConcern: 'majority',
replicaSet: 'rs0',
readPreference: 'primary',
database: DB_NAME,
},
};
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
metadata.setup(done);
});
});
});
afterAll(done => {
async.series([
next => metadata.close(next),
next => mongoserver.stop()
.then(() => next())
.catch(next),
], done);
});
[BucketVersioningKeyFormat.v0, BucketVersioningKeyFormat.v1].forEach(v => {
describe(`bucket format version: ${v}`, () => {
beforeEach(done => {
const bucketMD = makeBucketMD(BUCKET_NAME);
const versionParams = {
versioning: true,
versionId: null,
repairMaster: null,
};
metadata.client.defaultBucketKeyFormat = v;
async.series([
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
if (err) {
return next(err);
}
collection = metadata.client.getCollection(BUCKET_NAME);
return next();
}),
next => {
const objName = 'pfx1-test-object';
const objVal = {
key: 'pfx1-test-object',
versionId: 'null',
dataStoreName: location1,
};
const nbVersions = 5;
const timestamp = 0;
putBulkObjectVersions(metadata, BUCKET_NAME, objName, objVal, versionParams,
nbVersions, timestamp, logger, (err, data) => {
expectedVersionIds[objName] = data.lastVersionId;
return next(err);
});
/* eslint-disable max-len */
// The following versions are created:
// { "_id" : "Mpfx1-test-object", "value" : { "key" : "pfx1-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:00.005Z" } }
// { "_id" : "Vpfx1-test-object{sep}id4", "value" : { "key" : "pfx1-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:00.005Z" } }
// { "_id" : "Vpfx1-test-object{sep}id3", "value" : { "key" : "pfx1-test-object", "versionId" : "vid3", "last-modified" : "1970-01-01T00:00:00.004Z" } }
// { "_id" : "Vpfx1-test-object{sep}id2", "value" : { "key" : "pfx1-test-object", "versionId" : "vid2", "last-modified" : "1970-01-01T00:00:00.003Z" } }
// { "_id" : "Vpfx1-test-object{sep}id1", "value" : { "key" : "pfx1-test-object", "versionId" : "vid1", "last-modified" : "1970-01-01T00:00:00.002Z" } }
// { "_id" : "Vpfx1-test-object{sep}id0", "value" : { "key" : "pfx1-test-object", "versionId" : "vid0", "last-modified" : "1970-01-01T00:00:00.001Z" } }
/* eslint-enable max-len */
},
next => {
const objName = 'pfx2-test-object';
const objVal = {
key: 'pfx2-test-object',
versionId: 'null',
dataStoreName: location2,
};
const nbVersions = 5;
const timestamp = 2000;
putBulkObjectVersions(metadata, BUCKET_NAME, objName, objVal, versionParams,
nbVersions, timestamp, logger, (err, data) => {
expectedVersionIds[objName] = data.lastVersionId;
return next(err);
});
/* eslint-disable max-len */
// The following versions are created:
// { "_id" : "Mpfx2-test-object", "value" : { "key" : "pfx2-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:02.005Z" } }
// { "_id" : "Vpfx2-test-object{sep}id4", "value" : { "key" : "pfx2-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:02.005Z" } }
// { "_id" : "Vpfx2-test-object{sep}id3", "value" : { "key" : "pfx2-test-object", "versionId" : "vid3", "last-modified" : "1970-01-01T00:00:02.004Z" } }
// { "_id" : "Vpfx2-test-object{sep}id2", "value" : { "key" : "pfx2-test-object", "versionId" : "vid2", "last-modified" : "1970-01-01T00:00:02.003Z" } }
// { "_id" : "Vpfx2-test-object{sep}id1", "value" : { "key" : "pfx2-test-object", "versionId" : "vid1", "last-modified" : "1970-01-01T00:00:02.002Z" } }
// { "_id" : "Vpfx1-test-object{sep}id0", "value" : { "key" : "pfx2-test-object", "versionId" : "vid0", "last-modified" : "1970-01-01T00:00:02.001Z" } }
/* eslint-enable max-len */
},
next => {
const objName = 'pfx3-test-object';
const objVal = {
key: 'pfx3-test-object',
versionId: 'null',
dataStoreName: location1,
};
const nbVersions = 5;
const timestamp = 1000;
putBulkObjectVersions(metadata, BUCKET_NAME, objName, objVal, versionParams,
nbVersions, timestamp, logger, (err, data) => {
expectedVersionIds[objName] = data.lastVersionId;
return next(err);
});
/* eslint-disable max-len */
// The following versions are created:
// { "_id" : "Mpfx3-test-object", "value" : { "key" : "pfx3-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:01.005Z" } }
// { "_id" : "Vpfx3-test-object{sep}id4", "value" : { "key" : "pfx3-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:01.005Z" } }
// { "_id" : "Vpfx3-test-object{sep}id3", "value" : { "key" : "pfx3-test-object", "versionId" : "vid3", "last-modified" : "1970-01-01T00:00:01.004Z" } }
// { "_id" : "Vpfx3-test-object{sep}id2", "value" : { "key" : "pfx3-test-object", "versionId" : "vid2", "last-modified" : "1970-01-01T00:00:01.003Z" } }
// { "_id" : "Vpfx3-test-object{sep}id1", "value" : { "key" : "pfx3-test-object", "versionId" : "vid1", "last-modified" : "1970-01-01T00:00:01.002Z" } }
// { "_id" : "Vpfx3-test-object{sep}id0", "value" : { "key" : "pfx3-test-object", "versionId" : "vid0", "last-modified" : "1970-01-01T00:00:01.001Z" } }
/* eslint-enable max-len */
},
], done);
});
afterEach(done => {
metadata.deleteBucket(BUCKET_NAME, logger, done);
});
it('Should list current versions of objects', done => {
const params = {
listingType: 'DelimiterCurrent',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.IsTruncated, false);
assert.strictEqual(data.Contents.length, 3);
const expected = [
{
key: 'pfx1-test-object',
LastModified: '1970-01-01T00:00:00.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx1-test-object'],
},
{
key: 'pfx2-test-object',
LastModified: '1970-01-01T00:00:02.005Z',
dataStoreName: location2,
VersionId: expectedVersionIds['pfx2-test-object'],
},
{
key: 'pfx3-test-object',
LastModified: '1970-01-01T00:00:01.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx3-test-object'],
},
];
assertContents(data.Contents, expected);
return done();
});
});
it('Should list current versions of objects excluding keys stored in location2', done => {
const params = {
listingType: 'DelimiterCurrent',
excludedDataStoreName: location2,
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.IsTruncated, false);
assert.strictEqual(data.Contents.length, 2);
const expected = [
{
key: 'pfx1-test-object',
LastModified: '1970-01-01T00:00:00.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx1-test-object'],
},
{
key: 'pfx3-test-object',
LastModified: '1970-01-01T00:00:01.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx3-test-object'],
},
];
assertContents(data.Contents, expected);
return done();
});
});
it('Should list current versions of objects excluding keys stored in location1', done => {
const params = {
listingType: 'DelimiterCurrent',
excludedDataStoreName: location1,
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.IsTruncated, false);
assert.strictEqual(data.Contents.length, 1);
const expected = [
{
key: 'pfx2-test-object',
LastModified: '1970-01-01T00:00:02.005Z',
dataStoreName: location2,
VersionId: expectedVersionIds['pfx2-test-object'],
},
];
assertContents(data.Contents, expected);
return done();
});
});
it('Should list current versions of objects with prefix and excluding keys stored in location2', done => {
const params = {
listingType: 'DelimiterCurrent',
excludedDataStoreName: location2,
prefix: 'pfx3',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.IsTruncated, false);
assert.strictEqual(data.Contents.length, 1);
const expected = [
{
key: 'pfx3-test-object',
LastModified: '1970-01-01T00:00:01.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx3-test-object'],
},
];
assertContents(data.Contents, expected);
return done();
});
});
it('Should return truncated list of current versions excluding keys stored in location2', done => {
const params = {
listingType: 'DelimiterCurrent',
excludedDataStoreName: location2,
maxKeys: 1,
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.IsTruncated, true);
assert.strictEqual(data.Contents.length, 1);
assert.strictEqual(data.NextMarker, 'pfx1-test-object');
const expected = [
{
key: 'pfx1-test-object',
LastModified: '1970-01-01T00:00:00.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx1-test-object'],
},
];
assertContents(data.Contents, expected);
params.marker = 'pfx1-test-object';
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.IsTruncated, false);
assert.strictEqual(data.Contents.length, 1);
const expected = [
{
key: 'pfx3-test-object',
LastModified: '1970-01-01T00:00:01.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx3-test-object'],
},
];
assertContents(data.Contents, expected);
return done();
});
});
});
it('Should return empty list when beforeDate precedes all object creation dates', done => {
const params = {
listingType: 'DelimiterCurrent',
beforeDate: '1970-01-01T00:00:00.000Z',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.IsTruncated, false);
assert.strictEqual(data.Contents.length, 0);
return done();
});
});
it('Should return the current version modified before 1970-01-01T00:00:00.010Z', done => {
const params = {
listingType: 'DelimiterCurrent',
beforeDate: '1970-01-01T00:00:00.010Z',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.IsTruncated, false);
assert.strictEqual(data.Contents.length, 1);
const expected = [
{
key: 'pfx1-test-object',
LastModified: '1970-01-01T00:00:00.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx1-test-object'],
},
];
assertContents(data.Contents, expected);
return done();
});
});
it('Should return the current versions modified before 1970-01-01T00:00:01.010Z', done => {
const params = {
listingType: 'DelimiterCurrent',
beforeDate: '1970-01-01T00:00:01.010Z',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.IsTruncated, false);
assert.strictEqual(data.Contents.length, 2);
const expected = [
{
key: 'pfx1-test-object',
LastModified: '1970-01-01T00:00:00.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx1-test-object'],
},
{
key: 'pfx3-test-object',
LastModified: '1970-01-01T00:00:01.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx3-test-object'],
},
];
assertContents(data.Contents, expected);
return done();
});
});
it('Should return the current versions modified before 1970-01-01T00:00:02.010Z', done => {
const params = {
listingType: 'DelimiterCurrent',
beforeDate: '1970-01-01T00:00:02.010Z',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.IsTruncated, false);
assert.strictEqual(data.Contents.length, 3);
const expected = [
{
key: 'pfx1-test-object',
LastModified: '1970-01-01T00:00:00.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx1-test-object'],
},
{
key: 'pfx2-test-object',
LastModified: '1970-01-01T00:00:02.005Z',
dataStoreName: location2,
VersionId: expectedVersionIds['pfx2-test-object'],
},
{
key: 'pfx3-test-object',
LastModified: '1970-01-01T00:00:01.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx3-test-object'],
},
];
assertContents(data.Contents, expected);
return done();
});
});
it('Should truncate the list of current versions modified before 1970-01-01T00:00:01.010Z', done => {
const params = {
listingType: 'DelimiterCurrent',
beforeDate: '1970-01-01T00:00:01.010Z',
maxKeys: 1,
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.IsTruncated, true);
assert.strictEqual(data.Contents.length, 1);
assert.strictEqual(data.NextMarker, 'pfx1-test-object');
const expected = [
{
key: 'pfx1-test-object',
LastModified: '1970-01-01T00:00:00.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx1-test-object'],
},
];
assertContents(data.Contents, expected);
params.marker = 'pfx1-test-object';
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.IsTruncated, false);
assert.strictEqual(data.Contents.length, 1);
const expected = [
{
key: 'pfx3-test-object',
LastModified: '1970-01-01T00:00:01.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx3-test-object'],
},
];
assertContents(data.Contents, expected);
return done();
});
});
});
it('Should truncate list of current versions of objects', done => {
const params = {
listingType: 'DelimiterCurrent',
maxKeys: 2,
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.IsTruncated, true);
assert.strictEqual(data.NextMarker, 'pfx2-test-object');
assert.strictEqual(data.Contents.length, 2);
const expected = [
{
key: 'pfx1-test-object',
LastModified: '1970-01-01T00:00:00.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx1-test-object'],
},
{
key: 'pfx2-test-object',
LastModified: '1970-01-01T00:00:02.005Z',
dataStoreName: location2,
VersionId: expectedVersionIds['pfx2-test-object'],
},
];
assertContents(data.Contents, expected);
return done();
});
});
it('Should list the following current versions of objects', done => {
const params = {
listingType: 'DelimiterCurrent',
marker: 'pfx2-test-object',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.IsTruncated, false);
assert.strictEqual(data.Contents.length, 1);
const expected = [
{
key: 'pfx3-test-object',
LastModified: '1970-01-01T00:00:01.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx3-test-object'],
},
];
assertContents(data.Contents, expected);
return done();
});
});
it('Should list current versions that start with prefix', done => {
const params = {
listingType: 'DelimiterCurrent',
prefix: 'pfx2',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.IsTruncated, false);
assert.strictEqual(data.Contents.length, 1);
const expected = [
{
key: 'pfx2-test-object',
LastModified: '1970-01-01T00:00:02.005Z',
dataStoreName: location2,
VersionId: expectedVersionIds['pfx2-test-object'],
},
];
assertContents(data.Contents, expected);
return done();
});
});
it('Should return the list of current versions modified before 1970-01-01T00:00:01.010Z with prefix pfx1',
done => {
const params = {
listingType: 'DelimiterCurrent',
beforeDate: '1970-01-01T00:00:01.010Z',
maxKeys: 1,
prefix: 'pfx1',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.IsTruncated, false);
assert.strictEqual(data.Contents.length, 1);
const expected = [
{
key: 'pfx1-test-object',
LastModified: '1970-01-01T00:00:00.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx1-test-object'],
},
];
assertContents(data.Contents, expected);
return done();
});
});
it('Should not list deleted version', done => {
const objVal = {
'key': 'pfx4-test-object',
'last-modified': new Date(0).toISOString(),
};
const versionParams = {
versioning: true,
};
const params = {
listingType: 'DelimiterCurrent',
};
let deletedVersionId;
async.series([
next => metadata.putObjectMD(BUCKET_NAME, objVal.key, objVal, versionParams,
logger, (err, res) => {
if (err) {
return next(err);
}
deletedVersionId = JSON.parse(res).versionId;
return next(null);
}),
next => metadata.deleteObjectMD(BUCKET_NAME, objVal.key,
{ versionId: deletedVersionId }, logger, next),
next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.Contents.length, 3);
const expected = [
{
key: 'pfx1-test-object',
LastModified: '1970-01-01T00:00:00.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx1-test-object'],
},
{
key: 'pfx2-test-object',
LastModified: '1970-01-01T00:00:02.005Z',
dataStoreName: location2,
VersionId: expectedVersionIds['pfx2-test-object'],
},
{
key: 'pfx3-test-object',
LastModified: '1970-01-01T00:00:01.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx3-test-object'],
},
];
assertContents(data.Contents, expected);
return next();
}),
], done);
});
it('Should not list object with delete marker', done => {
const objVal = {
'key': 'pfx4-test-object',
'last-modified': new Date(0).toISOString(),
};
const dmObjVal = { ...objVal, isDeleteMarker: true };
const versionParams = {
versioning: true,
};
const params = {
listingType: 'DelimiterCurrent',
};
async.series([
next => metadata.putObjectMD(BUCKET_NAME, objVal.key, objVal, versionParams, logger, next),
next => metadata.putObjectMD(BUCKET_NAME, objVal.key, dmObjVal, versionParams, logger, next),
next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.Contents.length, 3);
const expected = [
{
key: 'pfx1-test-object',
LastModified: '1970-01-01T00:00:00.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx1-test-object'],
},
{
key: 'pfx2-test-object',
LastModified: '1970-01-01T00:00:02.005Z',
dataStoreName: location2,
VersionId: expectedVersionIds['pfx2-test-object'],
},
{
key: 'pfx3-test-object',
LastModified: '1970-01-01T00:00:01.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx3-test-object'],
},
];
assertContents(data.Contents, expected);
return next();
}),
], done);
});
it('Should not list PHD master key when listing current versions', done => {
const objVal = {
'key': 'pfx4-test-object',
'versionId': 'null',
'last-modified': new Date(0).toISOString(),
};
const versionParams = {
versioning: true,
};
const params = {
listingType: 'DelimiterCurrent',
prefix: 'pfx4',
};
let versionId;
let lastVersionId;
async.series([
next => metadata.putObjectMD(BUCKET_NAME, 'pfx4-test-object', objVal, versionParams,
logger, (err, res) => {
if (err) {
return next(err);
}
versionId = JSON.parse(res).versionId;
return next(null);
}),
next => metadata.putObjectMD(BUCKET_NAME, 'pfx4-test-object', objVal, versionParams,
logger, (err, res) => {
if (err) {
return next(err);
}
lastVersionId = JSON.parse(res).versionId;
return next(null);
}),
next => metadata.deleteObjectMD(BUCKET_NAME, 'pfx4-test-object', { versionId: lastVersionId },
logger, next),
next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.Contents[0].value.VersionId, versionId);
return next();
}),
], done);
});
it('Should not list the current version tagged for deletion', done => {
const objVal = {
'key': 'pfx4-test-object',
'last-modified': new Date(0).toISOString(),
};
const versionParams = {
versioning: true,
};
const params = {
listingType: 'DelimiterCurrent',
};
async.series([
next => metadata.putObjectMD(BUCKET_NAME, objVal.key, objVal, versionParams,
logger, next),
next => flagObjectForDeletion(collection, objVal.key, next),
next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.Contents.length, 3);
const expected = [
{
key: 'pfx1-test-object',
LastModified: '1970-01-01T00:00:00.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx1-test-object'],
},
{
key: 'pfx2-test-object',
LastModified: '1970-01-01T00:00:02.005Z',
dataStoreName: location2,
VersionId: expectedVersionIds['pfx2-test-object'],
},
{
key: 'pfx3-test-object',
LastModified: '1970-01-01T00:00:01.005Z',
dataStoreName: location1,
VersionId: expectedVersionIds['pfx3-test-object'],
},
];
assertContents(data.Contents, expected);
return next();
}),
], done);
});
});
});
});


@ -1,215 +0,0 @@
const async = require('async');
const assert = require('assert');
const werelogs = require('werelogs');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
const MetadataWrapper =
require('../../../../../lib/storage/metadata/MetadataWrapper');
const { versioning } = require('../../../../../index');
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
const { makeBucketMD } = require('./utils');
const IMPL_NAME = 'mongodb';
const DB_NAME = 'metadata';
const BUCKET_NAME = 'test-lifecycle-list-bucket-null';
const mongoserver = new MongoMemoryReplSet({
debug: false,
instanceOpts: [
{ port: 27020 },
],
replSet: {
name: 'rs0',
count: 1,
DB_NAME,
storageEngine: 'ephemeralForTest',
},
});
describe('MongoClientInterface::metadata.listLifecycleObject::nullVersion', () => {
let metadata;
beforeAll(done => {
mongoserver.start().then(() => {
mongoserver.waitUntilRunning().then(() => {
const opts = {
mongodb: {
replicaSetHosts: 'localhost:27020',
writeConcern: 'majority',
replicaSet: 'rs0',
readPreference: 'primary',
database: DB_NAME,
},
};
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
metadata.setup(done);
});
});
});
afterAll(done => {
async.series([
next => metadata.close(next),
next => mongoserver.stop()
.then(() => next())
.catch(next),
], done);
});
[BucketVersioningKeyFormat.v0, BucketVersioningKeyFormat.v1].forEach(v => {
describe(`bucket format version: ${v}`, () => {
beforeEach(done => {
const bucketMD = makeBucketMD(BUCKET_NAME);
const versionParams = {
versioning: true,
versionId: null,
repairMaster: null,
};
metadata.client.defaultBucketKeyFormat = v;
async.series([
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, next),
next => {
const objName = 'key0';
const timestamp = 0;
const lastModified = new Date(timestamp).toISOString();
const objVal = {
'key': objName,
'versionId': 'null',
'isNull': true,
'last-modified': lastModified,
};
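// isNull marks this metadata as a null version, i.e. one written while
// versioning was disabled or suspended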
return metadata.putObjectMD(BUCKET_NAME, objName, objVal, versionParams, logger, next);
},
next => {
const objName = 'key1';
const timestamp = 0;
const lastModified = new Date(timestamp).toISOString();
const objVal = {
'key': objName,
'versionId': 'null',
'isNull': true,
'last-modified': lastModified,
};
return metadata.putObjectMD(BUCKET_NAME, objName, objVal, versionParams, logger, next);
},
next => {
const objName = 'key1';
const timestamp = 0;
const lastModified = new Date(timestamp).toISOString();
const objVal = {
'key': objName,
'last-modified': lastModified,
};
return metadata.putObjectMD(BUCKET_NAME, objName, objVal, versionParams, logger, next);
},
// key2 simulates a scenario where:
// 1) bucket is versioned
// 2) put object key2
// 3) bucket versioning gets suspended
// 4) put object key2
// result:
// {
// "_id" : "Mkey0",
// "value" : {
// "key" : "key2",
// "isNull" : true,
// "versionId" : "<VersionId2>",
// "last-modified" : "2023-07-11T14:16:00.151Z",
// }
// },
// {
// "_id" : "Vkey0\u0000<VersionId1>",
// "value" : {
// "key" : "key2",
// "versionId" : "<VersionId1>",
// "tags" : {
// },
// "last-modified" : "2023-07-11T14:15:36.713Z",
// }
// },
next => {
const objName = 'key2';
const timestamp = 0;
const lastModified = new Date(timestamp).toISOString();
const objVal = {
'key': objName,
'last-modified': lastModified,
};
return metadata.putObjectMD(BUCKET_NAME, objName, objVal, versionParams, logger, next);
},
next => {
const objName = 'key2';
const timestamp = 0;
const params = {
versionId: '',
};
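// an empty versionId is assumed to emulate a put under suspended
// versioning, rewriting the key's null version in place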
const lastModified = new Date(timestamp).toISOString();
const objVal = {
'key': objName,
'last-modified': lastModified,
'isNull': true,
};
return metadata.putObjectMD(BUCKET_NAME, objName, objVal, params, logger, next);
},
], done);
});
afterEach(done => metadata.deleteBucket(BUCKET_NAME, logger, done));
it('Should list the null current version and set IsNull to true', done => {
const params = {
listingType: 'DelimiterCurrent',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.IsTruncated, false);
assert.strictEqual(data.Contents.length, 3);
// check that key0 has a null current version
const firstKey = data.Contents[0];
assert.strictEqual(firstKey.key, 'key0');
assert.strictEqual(firstKey.value.IsNull, true);
// check that key1 has no null current version
const secondKey = data.Contents[1];
assert.strictEqual(secondKey.key, 'key1');
assert(!secondKey.value.IsNull);
// check that key2 has a null current version
const thirdKey = data.Contents[2];
assert.strictEqual(thirdKey.key, 'key2');
assert.strictEqual(thirdKey.value.IsNull, true);
return done();
});
});
it('Should list the null non-current version and set IsNull to true', done => {
const params = {
listingType: 'DelimiterNonCurrent',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.IsTruncated, false);
assert.strictEqual(data.Contents.length, 2);
// check that key1 has a null non-current version
const firstKey = data.Contents[0];
assert.strictEqual(firstKey.key, 'key1');
assert.strictEqual(firstKey.value.IsNull, true);
// check that key2 has no null non-current version
const secondKey = data.Contents[1];
assert.strictEqual(secondKey.key, 'key2');
assert(!secondKey.value.IsNull);
return done();
});
});
});
});
});


@ -1,455 +0,0 @@
const async = require('async');
const assert = require('assert');
const werelogs = require('werelogs');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
const MetadataWrapper =
require('../../../../../lib/storage/metadata/MetadataWrapper');
const { versioning } = require('../../../../../index');
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
const { makeBucketMD, putBulkObjectVersions } = require('./utils');
const IMPL_NAME = 'mongodb';
const DB_NAME = 'metadata';
const BUCKET_NAME = 'test-lifecycle-list-orphan-bucket';
const mongoserver = new MongoMemoryReplSet({
debug: false,
instanceOpts: [
{ port: 27020 },
],
replSet: {
name: 'rs0',
count: 1,
DB_NAME,
storageEngine: 'ephemeralForTest',
},
});
describe('MongoClientInterface::metadata.listLifecycleObject::orphan', () => {
let metadata;
beforeAll(done => {
mongoserver.start().then(() => {
mongoserver.waitUntilRunning().then(() => {
const opts = {
mongodb: {
replicaSetHosts: 'localhost:27020',
writeConcern: 'majority',
replicaSet: 'rs0',
readPreference: 'primary',
database: DB_NAME,
},
};
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
metadata.setup(done);
});
});
});
afterAll(done => {
async.series([
next => metadata.close(next),
next => mongoserver.stop()
.then(() => next())
.catch(next),
], done);
});
[BucketVersioningKeyFormat.v0, BucketVersioningKeyFormat.v1].forEach(v => {
describe(`bucket format version: ${v}`, () => {
beforeEach(done => {
const bucketMD = makeBucketMD(BUCKET_NAME);
const versionParams = {
versioning: true,
versionId: null,
repairMaster: null,
};
metadata.client.defaultBucketKeyFormat = v;
async.series([
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, next),
next => {
const keyName = 'pfx0-test-object';
const objVal = {
'key': keyName,
'isDeleteMarker': true,
'last-modified': new Date(0).toISOString(), // 1970-01-01T00:00:00.000Z
};
const params = {
versioning: true,
};
return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
},
next => {
const params = {
objName: 'pfx1-test-object',
objVal: {
key: 'pfx1-test-object',
versionId: 'null',
},
nbVersions: 1,
};
const timestamp = 0;
putBulkObjectVersions(metadata, BUCKET_NAME, params.objName, params.objVal, versionParams,
params.nbVersions, timestamp, logger, next);
},
next => {
const params = {
objName: 'pfx2-test-object',
objVal: {
key: 'pfx2-test-object',
versionId: 'null',
},
nbVersions: 1,
};
const timestamp = 0;
putBulkObjectVersions(metadata, BUCKET_NAME, params.objName, params.objVal, versionParams,
params.nbVersions, timestamp, logger, next);
},
next => {
const keyName = 'pfx2-test-object';
const objVal = {
'key': keyName,
'isDeleteMarker': true,
'last-modified': new Date(2).toISOString(), // 1970-01-01T00:00:00.002Z
};
const params = {
versioning: true,
};
return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
},
next => {
const keyName = 'pfx3-test-object';
const objVal = {
'key': keyName,
'isDeleteMarker': true,
'last-modified': new Date(0).toISOString(), // 1970-01-01T00:00:00.000Z
};
const params = {
versioning: true,
};
return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
},
next => {
const keyName = 'pfx4-test-object';
const objVal = {
'key': keyName,
'isDeleteMarker': true,
'last-modified': new Date(5).toISOString(), // 1970-01-01T00:00:00.005Z
};
const params = {
versioning: true,
};
return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
},
next => {
const keyName = 'pfx4-test-object2';
const objVal = {
'key': keyName,
'isDeleteMarker': true,
'last-modified': new Date(6).toISOString(), // 1970-01-01T00:00:00.006Z
};
const params = {
versioning: true,
};
return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
},
], done);
});
/* eslint-disable max-len */
// { "_id" : "Mpfx1-test-object", "value" : { "key" : "pfx1-test-object", "versionId" : "v1", "last-modified" : "1970-01-01T00:00:00.001Z" } }
// { "_id" : "Vpfx0-test-object{sep}v0", "value" : { "key" : "pfx0-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.000Z", "versionId" : "v0" } }
// { "_id" : "Vpfx1-test-object{sep}v1", "value" : { "key" : "pfx1-test-object", "versionId" : "v1", "last-modified" : "1970-01-01T00:00:00.001Z" } }
// { "_id" : "Vpfx2-test-object{sep}v3", "value" : { "key" : "pfx2-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.002Z", "versionId" : "v3" } }
// { "_id" : "Vpfx2-test-object{sep}v2", "value" : { "key" : "pfx2-test-object", "versionId" : "v2", "last-modified" : "1970-01-01T00:00:00.001Z" } }
// { "_id" : "Vpfx3-test-object{sep}v4", "value" : { "key" : "pfx3-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.000Z", "versionId" : "v4" } }
// { "_id" : "Vpfx4-test-object{sep}v5", "value" : { "key" : "pfx4-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.005Z", "versionId" : "v5" } }
// { "_id" : "Vpfx4-test-object2{sep}v6", "value" : { "key" : "pfx4-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.006Z", "versionId" : "v6" } }
/* eslint-enable max-len */
afterEach(done => {
metadata.deleteBucket(BUCKET_NAME, logger, done);
});
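// an orphan delete marker is a delete marker left as the only version of
// its key; pfx2's marker sits on top of a live version, so it is expected
// to be excluded from the listings below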
it('Should list orphan delete markers', done => {
const params = {
listingType: 'DelimiterOrphanDeleteMarker',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.IsTruncated, false);
assert(!data.NextMarker);
assert.strictEqual(data.Contents.length, 4);
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
assert.strictEqual(data.Contents[2].key, 'pfx4-test-object');
assert.strictEqual(data.Contents[3].key, 'pfx4-test-object2');
return done();
});
});
it('Should return empty list when beforeDate is not after the oldest last-modified', done => {
const params = {
listingType: 'DelimiterOrphanDeleteMarker',
beforeDate: '1970-01-01T00:00:00.000Z',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.IsTruncated, false);
assert(!data.NextMarker);
assert.strictEqual(data.Contents.length, 0);
return done();
});
});
it('Should list orphan delete markers older than 1970-01-01T00:00:00.003Z', done => {
const params = {
listingType: 'DelimiterOrphanDeleteMarker',
beforeDate: '1970-01-01T00:00:00.003Z',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.IsTruncated, false);
assert(!data.NextMarker);
assert.strictEqual(data.Contents.length, 2);
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
return done();
});
});
it('Should return the first part of the orphan delete markers listing', done => {
const params = {
listingType: 'DelimiterOrphanDeleteMarker',
maxKeys: 1,
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.IsTruncated, true);
assert.strictEqual(data.NextMarker, 'pfx0-test-object');
assert.strictEqual(data.Contents.length, 1);
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
return done();
});
});
it('Should return the second part of the orphan delete markers listing', done => {
const params = {
listingType: 'DelimiterOrphanDeleteMarker',
marker: 'pfx0-test-object',
maxKeys: 1,
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.IsTruncated, true);
assert.strictEqual(data.NextMarker, 'pfx3-test-object');
assert.strictEqual(data.Contents.length, 1);
assert.strictEqual(data.Contents[0].key, 'pfx3-test-object');
return done();
});
});
it('Should return the third part of the orphan delete markers listing', done => {
const params = {
listingType: 'DelimiterOrphanDeleteMarker',
marker: 'pfx3-test-object',
maxKeys: 1,
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.IsTruncated, true);
assert.strictEqual(data.NextMarker, 'pfx4-test-object');
assert.strictEqual(data.Contents.length, 1);
assert.strictEqual(data.Contents[0].key, 'pfx4-test-object');
return done();
});
});
it('Should return the fourth part of the orphan delete markers listing', done => {
const params = {
listingType: 'DelimiterOrphanDeleteMarker',
marker: 'pfx4-test-object',
maxKeys: 1,
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.IsTruncated, false);
assert(!data.NextMarker);
assert.strictEqual(data.Contents.length, 1);
assert.strictEqual(data.Contents[0].key, 'pfx4-test-object2');
return done();
});
});
it('Should list the first two orphan delete markers', done => {
const params = {
listingType: 'DelimiterOrphanDeleteMarker',
maxKeys: 2,
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.IsTruncated, true);
assert.strictEqual(data.Contents.length, 2);
assert.strictEqual(data.NextMarker, 'pfx3-test-object');
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
return done();
});
});
it('Should list the first four orphan delete markers', done => {
const params = {
listingType: 'DelimiterOrphanDeleteMarker',
maxKeys: 4,
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.IsTruncated, false);
assert(!data.NextMarker);
assert.strictEqual(data.Contents.length, 4);
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
assert.strictEqual(data.Contents[2].key, 'pfx4-test-object');
assert.strictEqual(data.Contents[3].key, 'pfx4-test-object2');
return done();
});
});
it('Should return an empty list if no orphan delete marker starts with prefix pfx2', done => {
const params = {
listingType: 'DelimiterOrphanDeleteMarker',
prefix: 'pfx2',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.IsTruncated, false);
assert(!data.NextMarker);
assert.strictEqual(data.Contents.length, 0);
return done();
});
});
it('Should list orphan delete markers that start with prefix pfx4', done => {
const params = {
listingType: 'DelimiterOrphanDeleteMarker',
prefix: 'pfx4',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.IsTruncated, false);
assert(!data.NextMarker);
assert.strictEqual(data.Contents.length, 2);
assert.strictEqual(data.Contents[0].key, 'pfx4-test-object');
assert.strictEqual(data.Contents[1].key, 'pfx4-test-object2');
return done();
});
});
it('Should return the first orphan delete marker that starts with the prefix', done => {
const params = {
listingType: 'DelimiterOrphanDeleteMarker',
prefix: 'pfx4',
maxKeys: 1,
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.IsTruncated, true);
assert.strictEqual(data.Contents.length, 1);
assert.strictEqual(data.NextMarker, 'pfx4-test-object');
assert.strictEqual(data.Contents[0].key, 'pfx4-test-object');
return done();
});
});
it('Should return the next orphan delete marker that starts with the prefix', done => {
const params = {
listingType: 'DelimiterOrphanDeleteMarker',
marker: 'pfx4-test-object',
prefix: 'pfx4',
maxKeys: 1,
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.IsTruncated, false);
assert(!data.NextMarker);
assert.strictEqual(data.Contents.length, 1);
assert.strictEqual(data.Contents[0].key, 'pfx4-test-object2');
return done();
});
});
it('Should return the truncated list of orphan delete markers older than 1970-01-01T00:00:00.006Z',
done => {
const params = {
listingType: 'DelimiterOrphanDeleteMarker',
maxKeys: 2,
beforeDate: '1970-01-01T00:00:00.006Z',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.IsTruncated, true);
assert.strictEqual(data.Contents.length, 2);
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
assert.strictEqual(data.NextMarker, 'pfx3-test-object');
return done();
});
});
it('Should return the following list of orphan delete markers older than 1970-01-01T00:00:00.006Z',
done => {
const params = {
listingType: 'DelimiterOrphanDeleteMarker',
maxKeys: 2,
beforeDate: '1970-01-01T00:00:00.006Z',
marker: 'pfx3-test-object',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.IsTruncated, false);
assert.strictEqual(data.Contents.length, 1);
assert.strictEqual(data.Contents[0].key, 'pfx4-test-object');
return done();
});
});
it('Should return the truncated list of orphan delete markers older than 1970-01-01T00:00:00.001Z',
done => {
const params = {
listingType: 'DelimiterOrphanDeleteMarker',
maxKeys: 2,
beforeDate: '1970-01-01T00:00:00.001Z',
};
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.IsTruncated, true);
assert.strictEqual(data.Contents.length, 2);
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
assert.strictEqual(data.NextMarker, 'pfx3-test-object');
return done();
});
});
});
});
});

View File

@ -1,104 +0,0 @@
const async = require('async');
const BucketInfo = require('../../../../../lib/models/BucketInfo').default;
const assert = require('assert');
/**
 * Puts multiple versions of an object
* @param {Object} metadata - metadata client
* @param {String} bucketName - bucket name
* @param {String} objName - object key
* @param {Object} objVal - object metadata
* @param {Object} params - versioning parameters
* @param {number} versionNb - number of versions to put
* @param {number} timestamp - used for last-modified
* @param {Object} logger - a Logger instance
* @param {Function} cb - callback
* @returns {undefined}
*/
function putBulkObjectVersions(metadata, bucketName, objName, objVal, params, versionNb, timestamp, logger, cb) {
let count = 0;
const versionIds = [];
return async.whilst(
() => count < versionNb,
cbIterator => {
count++;
const lastModified = new Date(timestamp + count).toISOString();
// Copy objVal so the shared metadata object is not mutated across iterations.
const finalObjectVal = Object.assign({}, objVal, { 'last-modified': lastModified });
return metadata.putObjectMD(bucketName, objName, finalObjectVal, params, logger, (err, data) => {
if (err) {
return cbIterator(err);
}
versionIds.push(JSON.parse(data).versionId);
return cbIterator(null, versionIds);
});
}, (err, expectedVersionIds) => {
if (err) {
return cb(err);
}
// The last version is removed since it represents the current version.
const lastVersionId = expectedVersionIds.pop();
// The array is reversed to be aligned with the version order (latest to oldest).
expectedVersionIds.reverse();
return cb(null, { lastVersionId, expectedVersionIds });
});
}
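// Minimal usage sketch (hypothetical bucket/key names; `metadata`, `objVal`
// and `logger` are assumed to be set up as in the tests that used this helper,
// and the `{ versioning: true }` params shape is an assumption):
//
// putBulkObjectVersions(metadata, 'example-bucket', 'example-key', objVal,
//     { versioning: true }, 3, 0, logger, (err, res) => {
//         // res.lastVersionId is the current version; res.expectedVersionIds
//         // lists the non-current versions, latest first.
//     });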
function makeBucketMD(bucketName) {
return BucketInfo.fromObj({
_name: bucketName,
_owner: 'testowner',
_ownerDisplayName: 'testdisplayname',
_creationDate: new Date().toJSON(),
_acl: {
Canned: 'private',
FULL_CONTROL: [],
WRITE: [],
WRITE_ACP: [],
READ: [],
READ_ACP: [],
},
_mdBucketModelVersion: 10,
_transient: false,
_deleted: false,
_serverSideEncryption: null,
_versioningConfiguration: null,
_locationConstraint: 'us-east-1',
_readLocationConstraint: null,
_cors: null,
_replicationConfiguration: null,
_lifecycleConfiguration: null,
_uid: '',
_isNFS: null,
ingestion: null,
});
}
function assertContents(contents, expected) {
assert.strictEqual(contents.length, expected.length);
contents.forEach((c, i) => {
assert.strictEqual(c.key, expected[i].key);
assert.strictEqual(c.value.LastModified, expected[i].LastModified);
assert.strictEqual(c.value.staleDate, expected[i].staleDate);
assert.strictEqual(c.value.dataStoreName, expected[i].dataStoreName);
if (expected[i].VersionId) {
assert.strictEqual(c.value.VersionId, expected[i].VersionId);
}
});
}
/**
* Sets the "deleted" property to true
* @param {Object} collection - collection to be updated
* @param {string} key - object name
* @param {Function} cb - callback
* @return {undefined}
*/
function flagObjectForDeletion(collection, key, cb) {
collection.updateMany(
{ 'value.key': key },
{ $set: { 'value.deleted': true } },
{ upsert: false })
.then(() => cb())
.catch(err => cb(err));
}
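// Minimal usage sketch (assuming each bucket is backed by a MongoDB collection
// named after the bucket, and `db` is a connected Db handle):
//
// flagObjectForDeletion(db.collection(BUCKET_NAME), 'pfx1-test-object', err => {
//     assert.ifError(err);
// });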
module.exports = {
putBulkObjectVersions,
makeBucketMD,
assertContents,
flagObjectForDeletion,
};

Some files were not shown because too many files have changed in this diff.