10 Commits

66 changed files with 1206 additions and 5307 deletions

View File

@@ -1,8 +1,5 @@
{
"extends": "scality",
"env": {
"es2020": true
},
"parserOptions": {
"ecmaVersion": 2020
}

View File

@@ -11,7 +11,7 @@ on:
jobs:
analyze:
name: Static analysis with CodeQL
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4

View File

@@ -7,7 +7,7 @@ on:
jobs:
dependency-review:
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
steps:
- name: 'Checkout Repository'
uses: actions/checkout@v4

View File

@@ -8,7 +8,7 @@ on:
jobs:
test:
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
services:
# Label used to access the service container
redis:
@@ -56,7 +56,7 @@ jobs:
compile:
name: Compile and upload build artifacts
needs: test
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4

View File

@@ -10,10 +10,10 @@ const MAX_STREAK_LENGTH = 100;
*/
class Skip {
/**
* @param {Object} params - skip parameters
* @param {Object} params.extension - delimiter extension used (required)
* @param {String | string[]} params.gte - current range gte (greater than or
* equal) used by the client code
* @param {Object} params - skip parameters
* @param {Object} params.extension - delimiter extension used (required)
* @param {String} params.gte - current range gte (greater than or
* equal) used by the client code
*/
constructor(params) {
assert(params.extension);

View File

@@ -9,29 +9,6 @@ export type AuthInfoType = {
IAMdisplayName: string;
};
export type AuthorizationResults = {
isAllowed: boolean,
isImplicit: boolean,
arn: string,
action: string,
versionId?: string,
}[];
export type AccountQuota = {
account: string,
quota: bigint,
};
export type AccountInfos = {
accountQuota?: AccountQuota,
};
export type AuthV4Results = {
userInfo: AuthInfoType,
authorizationResults?: AuthorizationResults,
accountQuota: AccountQuota,
};
/**
* Class containing requester's information received from Vault
* @param {object} info from Vault including arn, canonicalID,

View File

@@ -1,6 +1,6 @@
import { Logger } from 'werelogs';
import errors from '../errors';
import AuthInfo, { AccountInfos, AuthInfoType, AuthorizationResults, AuthV4Results } from './AuthInfo';
import AuthInfo, { AuthInfoType } from './AuthInfo';
/** vaultSignatureCb parses message from Vault and instantiates
* @param err - error from vault
@@ -15,17 +15,15 @@ function vaultSignatureCb(
authInfo: {
message: {
message: string,
body: AuthV4Results,
body: {
userInfo: AuthInfoType,
authorizationResults: { [key: string]: any },
accountQuota: number,
},
},
},
log: Logger,
callback: (
err: Error | null,
data?: AuthInfoType,
results?: AuthorizationResults,
params?: any,
infos?: AccountInfos,
) => void,
callback: (err: Error | null, data?: any, results?: any, params?: any, infos?: any) => void,
streamingV4Params?: any
) {
// vaultclient API guarantees that it returns:
@@ -51,7 +49,7 @@ function vaultSignatureCb(
},
});
const info = authInfo.message.body as AuthV4Results;
const info = authInfo.message.body;
const userInfo = new AuthInfo(info.userInfo);
const authorizationResults = info.authorizationResults;
const auditLog: { accountDisplayName: string, IAMdisplayName?: string } =

View File

@@ -5,21 +5,14 @@ import { calculateSigningKey, hashSignature } from './vaultUtilities';
import Indexer from './Indexer';
import BaseBackend from '../base';
import { Accounts } from './types';
import { AuthInfoType, AuthV4Results } from '../../AuthInfo';
function _formatResponse(userInfo: AuthInfoType): { message: { body: AuthV4Results } } {
function _formatResponse(userInfoToSend: any) {
return {
message: {
body: {
userInfo,
accountQuota: {
account: userInfo.canonicalID,
quota: 0n,
},
},
body: { userInfo: userInfoToSend },
},
};
};
}
/**
* Class that provides a memory backend for verifying signatures and getting

View File

@@ -42,7 +42,7 @@ export const BucketAlreadyOwnedByYou: ErrorFormat = {
code: 409,
description:
'Your previous request to create the named bucket succeeded and you already own it.',
'A bucket with this name exists and is already owned by you',
};
export const BucketNotEmpty: ErrorFormat = {

View File

@@ -1,7 +1,7 @@
import assert from 'assert';
import { v4 as uuid } from 'uuid';
import uuid from 'uuid/v4';
import { WebsiteConfiguration, WebsiteConfigurationParams } from './WebsiteConfiguration';
import { WebsiteConfiguration } from './WebsiteConfiguration';
import ReplicationConfiguration from './ReplicationConfiguration';
import LifecycleConfiguration from './LifecycleConfiguration';
import ObjectLockConfiguration from './ObjectLockConfiguration';
@@ -9,7 +9,6 @@ import BucketPolicy from './BucketPolicy';
import NotificationConfiguration from './NotificationConfiguration';
import { ACL as OACL } from './ObjectMD';
import { areTagsValid, BucketTag } from '../s3middleware/tagging';
import { VeeamCapability, VeeamSOSApiSchema, VeeamSOSApiSerializable } from './Veeam';
// WHEN UPDATING THIS NUMBER, UPDATE BucketInfoModelVersion.md CHANGELOG
// BucketInfoModelVersion.md can be found in documentation/ at the root
@@ -39,54 +38,44 @@ export type VersioningConfiguration = {
MfaDelete: any;
};
/**
* Capabilities is the schema for the Capabilities object, where the
* capacity-related fields are bigints. Used by nodejs internally.
*/
export type VeeamSOSApi = {
SystemInfo?: {
ProtocolVersion: string,
ModelName: string,
ProtocolCapabilities: {
CapacityInfo: boolean,
UploadSessions: boolean,
IAMSTS?: boolean,
},
APIEndpoints?: {
IAMEndpoint: string,
STSEndpoint: string,
},
SystemRecommendations?: {
S3ConcurrentTaskLimit: number,
S3MultiObjectDelete: number,
StorageCurrentTasksLimit: number,
KbBlockSize: number,
}
LastModified?: string,
},
CapacityInfo?: {
Capacity: number,
Available: number,
Used: number,
LastModified?: string,
},
};
// Capabilities contains all specifics from external products supported by
// our S3 implementation, at bucket level
export type Capabilities = {
VeeamSOSApi?: VeeamSOSApiSchema,
VeeamSOSApi?: VeeamSOSApi,
};
export type ACL = OACL & { WRITE: string[] }
export type BucketMetadata = {
acl: ACL;
name: string,
owner: string,
ownerDisplayName: string,
creationDate: string,
mdBucketModelVersion: number,
transient: boolean,
deleted: boolean,
serverSideEncryption?: SSE,
versioningConfiguration?: VersioningConfiguration,
locationConstraint?: string,
readLocationConstraint?: string,
websiteConfiguration?: WebsiteConfigurationParams,
cors?: CORS,
replicationConfiguration?: any,
lifecycleConfiguration?: any,
bucketPolicy?: any,
uid: string,
isNFS?: boolean,
ingestion?: { status: 'enabled' | 'disabled' },
azureInfo?: any,
objectLockEnabled?: boolean,
objectLockConfiguration?: any,
notificationConfiguration?: any,
tags: Array<BucketTag>,
capabilities?: Capabilities,
quotaMax: bigint | number,
};
export type BucketMetadataJSON = Omit<BucketMetadata, 'quotaMax' | 'capabilities'> & {
quotaMax: string;
capabilities: {
VeeamSOSApi?: VeeamSOSApiSerializable,
};
};
export default class BucketInfo implements BucketMetadata {
export default class BucketInfo {
_acl: ACL;
_name: string;
_owner: string;
@@ -95,8 +84,8 @@ export default class BucketInfo implements BucketMetadata {
_mdBucketModelVersion: number;
_transient: boolean;
_deleted: boolean;
_serverSideEncryption: SSE | null;
_versioningConfiguration: VersioningConfiguration | null;
_serverSideEncryption: SSE;
_versioningConfiguration: VersioningConfiguration;
_locationConstraint: string | null;
_websiteConfiguration?: WebsiteConfiguration | null;
_cors: CORS | null;
@@ -113,7 +102,7 @@ export default class BucketInfo implements BucketMetadata {
_azureInfo: any | null;
_ingestion: { status: 'enabled' | 'disabled' } | null;
_capabilities?: Capabilities;
_quotaMax: bigint;
_quotaMax: number | 0;
/**
* Represents all bucket information.
@@ -178,12 +167,12 @@ export default class BucketInfo implements BucketMetadata {
ownerDisplayName: string,
creationDate: string,
mdBucketModelVersion: number,
acl?: ACL,
transient?: boolean,
deleted?: boolean,
serverSideEncryption?: SSE,
versioningConfiguration?: VersioningConfiguration,
locationConstraint?: string,
acl: ACL | undefined,
transient: boolean,
deleted: boolean,
serverSideEncryption: SSE,
versioningConfiguration: VersioningConfiguration,
locationConstraint: string,
websiteConfiguration?: WebsiteConfiguration | null,
cors?: CORS,
replicationConfiguration?: any,
@@ -199,7 +188,7 @@ export default class BucketInfo implements BucketMetadata {
notificationConfiguration?: any,
tags?: Array<BucketTag> | [],
capabilities?: Capabilities,
quotaMax?: bigint | number,
quotaMax?: number | 0,
) {
assert.strictEqual(typeof name, 'string');
assert.strictEqual(typeof owner, 'string');
@@ -268,27 +257,6 @@ export default class BucketInfo implements BucketMetadata {
assert(routingRules === undefined ||
Array.isArray(routingRules));
}
if (capabilities?.VeeamSOSApi?.CapacityInfo) {
assert(
typeof capabilities.VeeamSOSApi.CapacityInfo.Capacity === 'bigint' ||
typeof capabilities.VeeamSOSApi.CapacityInfo.Capacity === 'number'
);
assert(
typeof capabilities.VeeamSOSApi.CapacityInfo.Available === 'bigint' ||
typeof capabilities.VeeamSOSApi.CapacityInfo.Available === 'number'
);
assert(
typeof capabilities.VeeamSOSApi.CapacityInfo.Used === 'bigint' ||
typeof capabilities.VeeamSOSApi.CapacityInfo.Used === 'number'
);
assert(capabilities.VeeamSOSApi.CapacityInfo.Capacity >= -1);
assert(capabilities.VeeamSOSApi.CapacityInfo.Available >= -1);
assert(capabilities.VeeamSOSApi.CapacityInfo.Used >= -1);
}
if (quotaMax) {
assert(typeof quotaMax === 'bigint' || typeof quotaMax === 'number');
assert(quotaMax >= 0, 'Quota cannot be negative');
}
if (cors) {
assert(Array.isArray(cors));
}
@@ -324,6 +292,10 @@ export default class BucketInfo implements BucketMetadata {
tags = [] as BucketTag[];
}
assert.strictEqual(areTagsValid(tags), true);
if (quotaMax) {
assert.strictEqual(typeof quotaMax, 'number');
assert(quotaMax >= 0, 'Quota cannot be negative');
}
// IF UPDATING PROPERTIES, INCREMENT MODELVERSION NUMBER ABOVE
this._acl = aclInstance;
@@ -351,23 +323,17 @@ export default class BucketInfo implements BucketMetadata {
this._objectLockConfiguration = objectLockConfiguration || null;
this._notificationConfiguration = notificationConfiguration || null;
this._tags = tags;
this._capabilities = capabilities && {
...capabilities,
VeeamSOSApi: capabilities.VeeamSOSApi &&
VeeamCapability.toBigInt(capabilities.VeeamSOSApi),
};
this._quotaMax = BigInt(quotaMax || 0n);
this._capabilities = capabilities || undefined;
this._quotaMax = quotaMax || 0;
return this;
}
/**
* Make the bucket info serializable
* @return - serializable object
*/
makeSerializable() {
const bucketInfos: any & VeeamSOSApiSerializable = {
* Serialize the object
* @return - stringified object
*/
serialize() {
const bucketInfos = {
acl: this._acl,
name: this._name,
owner: this._owner,
@@ -393,12 +359,8 @@ export default class BucketInfo implements BucketMetadata {
objectLockConfiguration: this._objectLockConfiguration,
notificationConfiguration: this._notificationConfiguration,
tags: this._tags,
capabilities: this._capabilities && {
...this._capabilities,
VeeamSOSApi: this._capabilities.VeeamSOSApi &&
VeeamCapability.serialize(this._capabilities.VeeamSOSApi),
},
quotaMax: this._quotaMax.toString(),
capabilities: this._capabilities,
quotaMax: this._quotaMax,
};
const final = this._websiteConfiguration
? {
@@ -406,29 +368,15 @@ export default class BucketInfo implements BucketMetadata {
websiteConfiguration: this._websiteConfiguration.getConfig(),
}
: bucketInfos;
return final;
return JSON.stringify(final);
}
/**
* Serialize the object
* @return - stringified object
*/
serialize() {
return JSON.stringify(this.makeSerializable());
}
/**
* deSerialize the JSON string
* @param stringBucket - the stringified bucket
* @return - parsed string
*/
static deSerialize(stringBucket: string) {
const obj: BucketMetadataJSON = JSON.parse(stringBucket);
const capabilities = obj.capabilities && {
...obj.capabilities,
VeeamSOSApi: obj.capabilities?.VeeamSOSApi &&
VeeamCapability.parse(obj.capabilities?.VeeamSOSApi),
};
const obj = JSON.parse(stringBucket);
const websiteConfig = obj.websiteConfiguration ?
new WebsiteConfiguration(obj.websiteConfiguration) : null;
return new BucketInfo(obj.name, obj.owner, obj.ownerDisplayName,
@@ -439,7 +387,7 @@ export default class BucketInfo implements BucketMetadata {
obj.bucketPolicy, obj.uid, obj.readLocationConstraint, obj.isNFS,
obj.ingestion, obj.azureInfo, obj.objectLockEnabled,
obj.objectLockConfiguration, obj.notificationConfiguration, obj.tags,
capabilities, BigInt(obj.quotaMax || 0n));
obj.capabilities, obj.quotaMax);
}
/**
@@ -457,11 +405,6 @@ export default class BucketInfo implements BucketMetadata {
* @return Return an BucketInfo
*/
static fromObj(data: any) {
const capabilities: Capabilities = data._capabilities && {
...data._capabilities,
VeeamSOSApi: data._capabilities?.VeeamSOSApi &&
VeeamCapability.parse(data._capabilities?.VeeamSOSApi),
};
return new BucketInfo(data._name, data._owner, data._ownerDisplayName,
data._creationDate, data._mdBucketModelVersion, data._acl,
data._transient, data._deleted, data._serverSideEncryption,
@@ -471,31 +414,8 @@ export default class BucketInfo implements BucketMetadata {
data._bucketPolicy, data._uid, data._readLocationConstraint,
data._isNFS, data._ingestion, data._azureInfo,
data._objectLockEnabled, data._objectLockConfiguration,
data._notificationConfiguration, data._tags, capabilities,
BigInt(data._quotaMax || 0n));
}
/**
* Create a BucketInfo from a JSON object
*
* @param data - object containing data
* @return Return an BucketInfo
*/
static fromJson(data: BucketMetadataJSON) {
return new BucketInfo(data.name, data.owner, data.ownerDisplayName,
data.creationDate, data.mdBucketModelVersion, data.acl,
data.transient, data.deleted, data.serverSideEncryption,
data.versioningConfiguration, data.locationConstraint,
data.websiteConfiguration && new WebsiteConfiguration(data.websiteConfiguration),
data.cors, data.replicationConfiguration, data.lifecycleConfiguration,
data.bucketPolicy, data.uid, data.readLocationConstraint,
data.isNFS, data.ingestion, data.azureInfo,
data.objectLockEnabled, data.objectLockConfiguration,
data.notificationConfiguration, data.tags, {
...data.capabilities,
VeeamSOSApi: data.capabilities?.VeeamSOSApi &&
VeeamCapability.parse(data.capabilities?.VeeamSOSApi),
}, BigInt(data.quotaMax || 0n));
data._notificationConfiguration, data._tags, data._capabilities,
data._quotaMax);
}
/**
@@ -910,8 +830,6 @@ export default class BucketInfo implements BucketMetadata {
* Check if the bucket is an NFS bucket.
* @return - Whether the bucket is NFS or not
*/
// @ts-expect-error the function name is not compatible
// with an extension of the BucketMetadata interface
isNFS() {
return this._isNFS;
}
@@ -1035,7 +953,7 @@ export default class BucketInfo implements BucketMetadata {
* @param capability? - if provided, will return a specific capacity
* @return - capability of the bucket
*/
getCapability(capability: string) : Capabilities[keyof Capabilities] | undefined {
getCapability(capability: string) : VeeamSOSApi | undefined {
if (capability && this._capabilities && this._capabilities[capability]) {
return this._capabilities[capability];
}
@@ -1064,8 +982,8 @@ export default class BucketInfo implements BucketMetadata {
* @param quota - quota to be set
* @return - bucket quota info
*/
setQuota(quota: bigint | number) {
this._quotaMax = BigInt(quota || 0n);
setQuota(quota: number) {
this._quotaMax = quota || 0;
return this;
}
}

View File

@@ -1,5 +1,5 @@
import assert from 'assert';
import { v4 as uuidv4 } from 'uuid';
import UUID from 'uuid';
import errors, { ArsenalError } from '../errors';
import LifecycleRule from './LifecycleRule';
@@ -455,7 +455,7 @@ export default class LifecycleConfiguration {
if (!id || !id[0] || id[0] === '') {
// ID is optional property, but create one if not provided or is ''
// We generate 48-character alphanumeric, unique ID for rule
idObj.ruleID = Buffer.from(uuidv4()).toString('base64');
idObj.ruleID = Buffer.from(UUID.v4()).toString('base64');
} else {
idObj.ruleID = id[0];
}

View File

@@ -1,4 +1,4 @@
import { v4 as uuid } from 'uuid';
import uuid from 'uuid/v4';
export type Status = 'Disabled' | 'Enabled';
export type Tag = { Key: string; Value: string };

View File

@@ -1,5 +1,5 @@
import assert from 'assert';
import { v4 as uuidv4 } from 'uuid';
import UUID from 'uuid';
import {
supportedNotificationEvents,
@@ -220,7 +220,7 @@ export default class NotificationConfiguration {
if (!id || !id[0]) {
// id is optional property, so create one if not provided or is ''
// We generate 48-character alphanumeric, unique id for rule
validId = Buffer.from(uuidv4()).toString('base64');
validId = Buffer.from(UUID.v4()).toString('base64');
} else {
validId = id[0];
}

View File

@@ -1,5 +1,5 @@
import assert from 'assert';
import { v4 as uuidv4 } from 'uuid';
import UUID from 'uuid';
import { RequestLogger } from 'werelogs';
@@ -157,7 +157,7 @@ export default class ReplicationConfiguration {
obj.id =
rule.ID && rule.ID[0] !== ''
? rule.ID[0]
: Buffer.from(uuidv4()).toString('base64');
: Buffer.from(UUID.v4()).toString('base64');
// StorageClass is an optional property.
if (rule.Destination[0].StorageClass) {
obj.storageClass = rule.Destination[0].StorageClass[0];

View File

@@ -1,107 +0,0 @@
/**
* VeeamSOSApiSchema is the schema for the capabilities in the
* BucketInfo class.
* The capacity-related field accept numbers, but will be treated as
* bigints internally.
*/
export type VeeamSOSApiSchema = {
SystemInfo?: {
ProtocolVersion: string,
ModelName: string,
ProtocolCapabilities: {
CapacityInfo: boolean,
UploadSessions: boolean,
IAMSTS?: boolean,
},
APIEndpoints?: {
IAMEndpoint: string,
STSEndpoint: string,
},
SystemRecommendations?: {
S3ConcurrentTaskLimit: number,
S3MultiObjectDelete: number,
StorageCurrentTasksLimit: number,
KbBlockSize: number,
}
LastModified?: string,
},
CapacityInfo?: {
Capacity: bigint | number,
Available: bigint | number,
Used: bigint | number,
LastModified?: string,
},
};
/**
* VeeamSOSApiSerializable is the serializable version of the
* VeeamSOSApiSchema, where the capacity-related fields are
* strings.
*/
export type VeeamSOSApiSerializable = Omit<VeeamSOSApiSchema, 'CapacityInfo'> & {
CapacityInfo?: {
Capacity: string,
Available: string,
Used: string,
LastModified?: string,
},
}
/**
* The Veeam capacity for an S3 Bucket adds the
* ability to use the proprietary SOSAPI feature.
*/
export class VeeamCapacityInfo {
static serialize(capacity: VeeamSOSApiSchema['CapacityInfo']): VeeamSOSApiSerializable['CapacityInfo'] {
return {
Capacity: capacity?.Capacity?.toString() || '0',
Available: capacity?.Available?.toString() || '0',
Used: capacity?.Used?.toString() || '0',
LastModified: capacity?.LastModified,
};
}
static parse(capacity: VeeamSOSApiSerializable['CapacityInfo']): VeeamSOSApiSchema['CapacityInfo'] {
return {
Capacity: BigInt(capacity?.Capacity || 0),
Available: BigInt(capacity?.Available || 0),
Used: BigInt(capacity?.Used || 0),
LastModified: capacity?.LastModified,
};
}
static toBigInt(capacity: VeeamSOSApiSchema['CapacityInfo']): VeeamSOSApiSchema['CapacityInfo'] {
return {
Capacity: BigInt(capacity?.Capacity || 0),
Available: BigInt(capacity?.Available || 0),
Used: BigInt(capacity?.Used || 0),
LastModified: capacity?.LastModified,
};
}
}
export class VeeamCapability {
static serialize(veeamCapability: VeeamSOSApiSchema): VeeamSOSApiSerializable {
return {
...veeamCapability,
CapacityInfo: veeamCapability.CapacityInfo &&
VeeamCapacityInfo.serialize(veeamCapability.CapacityInfo),
};
}
static parse(veeamCapability: VeeamSOSApiSerializable): VeeamSOSApiSchema {
return {
...veeamCapability,
CapacityInfo: veeamCapability.CapacityInfo &&
VeeamCapacityInfo.parse(veeamCapability.CapacityInfo),
};
}
static toBigInt(veeamCapability: VeeamSOSApiSchema): VeeamSOSApiSchema {
return {
...veeamCapability,
CapacityInfo: veeamCapability.CapacityInfo &&
VeeamCapacityInfo.toBigInt(veeamCapability.CapacityInfo),
};
}
}

View File

@@ -75,14 +75,6 @@ export type RedirectAllRequestsTo = {
hostName: string;
protocol?: string;
};
export type WebsiteConfigurationParams = {
indexDocument: string;
errorDocument: string;
redirectAllRequestsTo: RedirectAllRequestsTo;
routingRules: RoutingRule[] | any[],
};
export class WebsiteConfiguration {
_indexDocument?: string;
_errorDocument?: string;
@@ -105,7 +97,12 @@ export class WebsiteConfiguration {
* @param params.routingRules - array of Routing
* Rule instances or plain routing rule objects to cast as RoutingRule's
*/
constructor(params: WebsiteConfigurationParams) {
constructor(params: {
indexDocument: string;
errorDocument: string;
redirectAllRequestsTo: RedirectAllRequestsTo;
routingRules: RoutingRule[] | any[],
}) {
if (params) {
this._indexDocument = params.indexDocument;
this._errorDocument = params.errorDocument;

View File

@@ -1,4 +1,4 @@
import { v4 as uuid } from 'uuid';
import uuid from 'uuid/v4';
import Message from './Message';
import * as werelogs from 'werelogs';

View File

@@ -1,4 +1,4 @@
import { v4 as uuid } from 'uuid';
import uuid from 'uuid';
import * as stream from 'stream';
import debug_ from 'debug';
import assert from 'assert';

View File

@@ -228,7 +228,6 @@ export default class RequestContext {
existingObjTag?: string,
needTagEval?: false,
objectLockRetentionDays?: number,
needQuota?: boolean,
) {
this._headers = headers;
this._query = query;
@@ -257,7 +256,7 @@ export default class RequestContext {
this._securityToken = securityToken;
this._policyArn = policyArn;
this._action = action;
this._needQuota = needQuota || actionNeedQuotaCheck[apiMethod] === true
this._needQuota = actionNeedQuotaCheck[apiMethod] === true
|| actionWithDataDeletion[apiMethod] === true;
this._requestObjTags = requestObjTags || null;
this._existingObjTag = existingObjTag || null;
@@ -295,7 +294,6 @@ export default class RequestContext {
existingObjTag: this._existingObjTag,
needTagEval: this._needTagEval,
objectLockRetentionDays: this._objectLockRetentionDays,
needQuota: this._needQuota,
};
return JSON.stringify(requestInfo);
}
@@ -337,7 +335,6 @@ export default class RequestContext {
obj.existingObjTag,
obj.needTagEval,
obj.objectLockRetentionDays,
obj.needQuota,
);
}

View File

@@ -1,15 +1,16 @@
import * as ipCheck from '../ipCheck'
import { IncomingMessage } from 'http'
import { TLSSocket } from 'tls'
export interface S3Config {
requests: {
trustedProxyCIDRs: string[],
extractClientIPFromHeader: string,
extractProtocolFromHeader: string,
extractClientIPFromHeader: string
}
}
// TODO
// I'm not sure about this behavior.
// Should it returns string | string[] | undefined or string ?
/**
* getClientIp - Gets the client IP from the request
* @param request - http request object
@@ -19,7 +20,8 @@ export interface S3Config {
export function getClientIp(request: IncomingMessage, s3config?: S3Config): string {
const requestConfig = s3config?.requests;
const remoteAddress = request.socket.remoteAddress;
const clientIp = remoteAddress?.toString() ?? '';
// TODO What to do if clientIp === undefined ?
const clientIp = (requestConfig ? remoteAddress : request.headers['x-forwarded-for'] || remoteAddress)?.toString() ?? '';
if (requestConfig) {
const { trustedProxyCIDRs, extractClientIPFromHeader } = requestConfig;
/**
@@ -28,31 +30,11 @@ export function getClientIp(request: IncomingMessage, s3config?: S3Config): stri
* which header to be used to extract client IP
*/
if (ipCheck.ipMatchCidrList(trustedProxyCIDRs, clientIp)) {
// Request headers in nodejs are lower-cased, so we should not
// be case-sentive when looking for the header, as http headers
// are case-insensitive.
const ipFromHeader = request.headers[extractClientIPFromHeader.toLowerCase()]?.toString();
const ipFromHeader = request.headers[extractClientIPFromHeader]?.toString();
if (ipFromHeader && ipFromHeader.trim().length) {
return ipFromHeader.split(',')[0].trim();
}
}
}
return clientIp;
}
/**
* getHttpProtocolSecurity - Determines whether the request is secure
* @param request - http request object
* @param s3config - s3 config
* @return {boolean} - returns true if the request is secure
*/
export function getHttpProtocolSecurity(request: IncomingMessage, s3config?: S3Config): boolean {
const requestConfig = s3config?.requests;
if (requestConfig) {
const { trustedProxyCIDRs } = requestConfig;
const clientIp = request.socket.remoteAddress?.toString() ?? '';
if (ipCheck.ipMatchCidrList(trustedProxyCIDRs, clientIp)) {
return request.headers[requestConfig.extractProtocolFromHeader.toLowerCase()] === 'https';
}
}
return request.socket instanceof TLSSocket && request.socket.encrypted;
return clientIp?.toString() ?? '';
}

View File

@@ -161,7 +161,7 @@ const actionMonitoringMapS3 = {
serviceGet: 'ListBuckets',
bucketGetQuota: 'GetBucketQuota',
bucketUpdateQuota: 'UpdateBucketQuota',
bucketDeleteQuota: 'DeleteBucketQuota',
bucketDeleteQuota: 'DeleteBucketQuota',
};
const actionMapAccountQuotas = {
@@ -229,14 +229,10 @@ const actionMapMetadata = {
const actionMapScuba = {
GetMetrics: 'scuba:GetMetrics',
GetMetricsBatch: 'scuba:GetMetricsBatch',
AdminStartIngest: 'scuba:AdminStartIngest',
AdminStopIngest: 'scuba:AdminStopIngest',
AdminReadRaftCseq: 'scuba:AdminReadRaftCseq',
AdminTriggerRepair: 'scuba:AdminTriggerRepair',
AdminStartDownsample: 'scuba:AdminStartDownsample',
AdminStopDownsample: 'scuba:AdminStopDownsample',
AdminTriggerDownsample: 'scuba:AdminTriggerDownsample',
};
export {

View File

@@ -1,5 +1,3 @@
import type { WithImplicitCoercion } from 'node:buffer';
const msInOneDay = 24 * 60 * 60 * 1000; // Milliseconds in a day.
export const getMD5Buffer = (base64MD5: WithImplicitCoercion<string> | Uint8Array) =>

View File

@@ -129,9 +129,8 @@ export function validateAndFilterMpuParts(
key: item.key,
ETag: `"${item.value.ETag}"`,
size: item.value.Size,
locations: (item.value.location||item.value.partLocations) instanceof Array
? (item.value.location||item.value.partLocations)
: [(item.value.location||item.value.partLocations)],
locations: Array.isArray(item.value.partLocations) ?
item.value.partLocations : [item.value.partLocations],
});
});
keysToDelete.push(mpuOverviewKey);

View File

@@ -1,7 +1,7 @@
import { RequestLogger } from 'werelogs';
import * as routesUtils from '../routesUtils';
import errors, { ArsenalError } from '../../errors';
import errors from '../../errors';
import StatsClient from '../../metrics/StatsClient';
import * as http from 'http';
@@ -54,14 +54,11 @@ export default function routeDELETE(
api.callApiMethod('objectDelete', request, response, log,
(err, corsHeaders) => {
/*
* Since AWS expects a 204 regardless of the existence of
* the object, the errors NoSuchKey and NoSuchVersion should not
* be sent back as a response.
*/
if (err && (
!(err instanceof ArsenalError) ||
(!err.is.NoSuchKey && !err.is.NoSuchVersion)
)) {
* Since AWS expects a 204 regardless of the existence of
* the object, the errors NoSuchKey and NoSuchVersion should not
* be sent back as a response.
*/
if (err && !err.is.NoSuchKey && !err.is.NoSuchVersion) {
return routesUtils.responseNoBody(err, corsHeaders,
response, undefined, log);
}

View File

@@ -87,6 +87,8 @@ export default function routerGET(
if (resMetaHeaders && resMetaHeaders['Content-Length']) {
contentLength = resMetaHeaders['Content-Length'];
}
// TODO ARSN-216 Fix logger
// @ts-ignore
log.end().addDefaultFields({ contentLength });
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseStreamData(err, query,

View File

@@ -225,7 +225,9 @@ export default function routePUT(
return routesUtils.responseNoBody(errors.BadRequest,
null, response, 400, log);
}
log.end().addDefaultFields({ contentLength: parsedContentLength });
// TODO ARSN-216 What's happening?
// @ts-ignore
log.end().addDefaultFields({ contentLength: request.parsedContentLength });
api.callApiMethod('objectPut', request, response, log,
(err, resHeaders) => {
routesUtils.statsReport500(err, statsClient);

View File

@@ -17,7 +17,7 @@ export type CallApiMethod = (
request: http.IncomingMessage,
response: http.ServerResponse,
log: RequestLogger,
callback: (err: ArsenalError | Error | null, ...data: any[]) => void,
callback: (err: ArsenalError | null, ...data: any[]) => void,
) => void;
/**
@@ -27,7 +27,7 @@ export type CallApiMethod = (
* @param log - Werelogs logger
* @return response - response object with additional headers
*/
export function setCommonResponseHeaders(
function setCommonResponseHeaders(
headers: { [key: string]: string } | undefined | null,
response: http.ServerResponse,
log: RequestLogger,
@@ -71,13 +71,15 @@ function okHeaderResponse(
log.debug('response http code', { httpCode });
response.writeHead(httpCode);
return response.end(() => {
// TODO ARSN-216 Fix logger
// @ts-expect-error
log.end().info('responded to request', {
httpCode: response.statusCode,
});
});
}
export const XMLResponseBackend = {
const XMLResponseBackend = {
/**
* okXMLResponse - Response with XML body
@@ -104,6 +106,8 @@ export const XMLResponseBackend = {
log.debug('response http code', { httpCode: 200 });
log.trace('xml response', { xml });
return response.end(xml, 'utf8', () => {
// TODO ARSN-216 Fix logger
// @ts-expect-error
log.end().info('responded with XML', {
httpCode: response.statusCode,
});
@@ -111,29 +115,25 @@ export const XMLResponseBackend = {
},
errorResponse: function errorXMLResponse(
errCode: ArsenalError | Error,
errCode: ArsenalError,
response: http.ServerResponse,
log: RequestLogger,
corsHeaders?: { [key: string]: string } | null,
) {
setCommonResponseHeaders(corsHeaders, response, log);
let error: ArsenalError;
if (errCode instanceof ArsenalError) {
error = errCode;
} else {
error = errors.InternalError.customizeDescription(errCode.message);
}
// early return to avoid extra headers and XML data
if (error.code === 304) {
response.writeHead(error.code);
if (errCode.code === 304) {
response.writeHead(errCode.code);
return response.end('', 'utf8', () => {
// TODO ARSN-216 Fix logger
// @ts-expect-error
log.end().info('responded with empty body', {
httpCode: response.statusCode,
});
});
}
log.trace('sending error xml response', { error });
log.trace('sending error xml response', { errCode });
/*
<?xml version="1.0" encoding="UTF-8"?>
<Error>
@@ -147,10 +147,10 @@ export const XMLResponseBackend = {
xml.push(
'<?xml version="1.0" encoding="UTF-8"?>',
'<Error>',
`<Code>${error.message}</Code>`,
`<Message>${error.description}</Message>`,
`<Code>${errCode.message}</Code>`,
`<Message>${errCode.description}</Message>`,
);
const invalidArguments = error.metadata.get('invalidArguments') || [];
const invalidArguments = errCode.metadata.get('invalidArguments') || [];
invalidArguments.forEach((invalidArgument, index) => {
const counter = index + 1;
const { ArgumentName, ArgumentValue } = invalidArgument as any;
@@ -165,11 +165,13 @@ export const XMLResponseBackend = {
const xmlStr = xml.join('');
const bytesSent = Buffer.byteLength(xmlStr);
log.addDefaultFields({ bytesSent });
response.writeHead(error.code, {
response.writeHead(errCode.code, {
'Content-Type': 'application/xml',
'Content-Length': bytesSent ,
});
return response.end(xmlStr, 'utf8', () => {
// TODO ARSN-216 Fix logger
// @ts-expect-error
log.end().info('responded with error XML', {
httpCode: response.statusCode,
});
@@ -177,7 +179,7 @@ export const XMLResponseBackend = {
},
};
export const JSONResponseBackend = {
const JSONResponseBackend = {
/**
* okJSONResponse - Response with JSON body
@@ -202,6 +204,8 @@ export const JSONResponseBackend = {
log.debug('response http code', { httpCode: 200 });
log.trace('json response', { json });
return response.end(json, 'utf8', () => {
// TODO ARSN-216 Fix logger
// @ts-expect-error
log.end().info('responded with JSON', {
httpCode: response.statusCode,
});
@@ -209,18 +213,12 @@ export const JSONResponseBackend = {
},
errorResponse: function errorJSONResponse(
errCode: ArsenalError | Error,
errCode: ArsenalError,
response: http.ServerResponse,
log: RequestLogger,
corsHeaders?: { [key: string]: string } | null,
) {
log.trace('sending error json response', { errCode });
let error: ArsenalError;
if (errCode instanceof ArsenalError) {
error = errCode;
} else {
error = errors.InternalError.customizeDescription(errCode.message);
}
/*
{
"code": "NoSuchKey",
@@ -229,7 +227,7 @@ export const JSONResponseBackend = {
"requestId": "4442587FB7D0A2F9"
}
*/
const invalidArguments = error.metadata.get('invalidArguments') || [];
const invalidArguments = errCode.metadata.get('invalidArguments') || [];
const invalids = invalidArguments.reduce((acc, invalidArgument, index) => {
const counter = index + 1;
const { ArgumentName, ArgumentValue } = invalidArgument as any;
@@ -238,8 +236,8 @@ export const JSONResponseBackend = {
return { ...acc, [name]: ArgumentName, [value]: ArgumentValue };
}, {});
const data = JSON.stringify({
code: error.message,
message: error.description,
code: errCode.message,
message: errCode.description,
...invalids,
resource: null,
requestId: log.getSerializedUids(),
@@ -247,11 +245,13 @@ export const JSONResponseBackend = {
const bytesSent = Buffer.byteLength(data);
log.addDefaultFields({ bytesSent });
setCommonResponseHeaders(corsHeaders, response, log);
response.writeHead(error.code, {
response.writeHead(errCode.code, {
'Content-Type': 'application/json',
'Content-Length': bytesSent,
});
return response.end(data, 'utf8', () => {
// TODO ARSN-216 Fix logger
// @ts-expect-error
log.end().info('responded with error JSON', {
httpCode: response.statusCode,
});
@@ -467,7 +467,7 @@ function retrieveData(
function _responseBody(
responseBackend: typeof XMLResponseBackend,
errCode: ArsenalError | Error | null | undefined,
errCode: ArsenalError | null | undefined,
payload: string | null,
response: http.ServerResponse,
log: RequestLogger,
@@ -517,7 +517,7 @@ function _contentLengthMatchesLocations(
* @return - error or success response utility
*/
export function responseXMLBody(
errCode: ArsenalError | Error | null | undefined,
errCode: ArsenalError | null | undefined,
xml: string | null,
response: http.ServerResponse,
log: RequestLogger,
@@ -557,7 +557,7 @@ export function responseJSONBody(
* @return - error or success response utility
*/
export function responseNoBody(
errCode: ArsenalError | Error | null,
errCode: ArsenalError | null,
resHeaders: { [key: string]: string } | null,
response: http.ServerResponse,
httpCode = 200,
@@ -583,7 +583,7 @@ export function responseNoBody(
* @return - router's response object
*/
export function responseContentHeaders(
errCode: ArsenalError | Error | null,
errCode: ArsenalError | null,
overrideParams: { [key: string]: string },
resHeaders: { [key: string]: string },
response: http.ServerResponse,
@@ -600,6 +600,8 @@ export function responseContentHeaders(
undefined, log);
}
return response.end(() => {
// TODO ARSN-216 Fix logger
// @ts-expect-error
log.end().info('responded with content headers', {
httpCode: response.statusCode,
});
@@ -622,7 +624,7 @@ export function responseContentHeaders(
* @param log - Werelogs logger
*/
export function responseStreamData(
errCode: ArsenalError | Error | null,
errCode: ArsenalError | null,
overrideParams: { [key: string]: string },
resHeaders: { [key: string]: string },
dataLocations: { size: string | number }[],
@@ -656,12 +658,16 @@ export function responseStreamData(
}
if (dataLocations === null || _computeContentLengthFromLocation(dataLocations) === 0) {
return response.end(() => {
// TODO ARSN-216 Fix logger
// @ts-expect-error
log.end().info('responded with only metadata', {
httpCode: response.statusCode,
});
});
}
response.on('finish', () => {
// TODO ARSN-216 Fix logger
// @ts-expect-error
log.end().info('responded with streamed content', {
httpCode: response.statusCode,
});
@@ -680,24 +686,20 @@ export function responseStreamData(
* @param log - Werelogs logger
*/
export function streamUserErrorPage(
err: ArsenalError | Error,
err: ArsenalError,
dataLocations: { size: string | number }[],
retrieveDataParams: any,
response: http.ServerResponse,
corsHeaders: { [key: string]: string },
log: RequestLogger,
) {
let error: ArsenalError;
if (err instanceof ArsenalError) {
error = err;
} else {
error = errors.InternalError.customizeDescription(err.message);
}
setCommonResponseHeaders(corsHeaders, response, log);
response.setHeader('x-amz-error-code', error.message);
response.setHeader('x-amz-error-message', error.description);
response.writeHead(error.code, { 'Content-type': 'text/html' });
response.setHeader('x-amz-error-code', err.message);
response.setHeader('x-amz-error-message', err.description);
response.writeHead(err.code, { 'Content-type': 'text/html' });
response.on('finish', () => {
// TODO ARSN-216 Fix logger
// @ts-expect-error
log.end().info('responded with streamed content', {
httpCode: response.statusCode,
});
@@ -715,37 +717,30 @@ export function streamUserErrorPage(
* @param log - Werelogs logger
g */
export function errorHtmlResponse(
err: ArsenalError | Error,
err: ArsenalError,
userErrorPageFailure: boolean,
bucketName: string,
response: http.ServerResponse,
corsHeaders: { [key: string]: string } | null,
log: RequestLogger,
) {
let error;
if (err instanceof ArsenalError) {
error = err;
} else {
error = errors.InternalError.customizeDescription(err.message);
}
log.trace('sending generic html error page',
{ error });
{ err });
setCommonResponseHeaders(corsHeaders, response, log);
response.writeHead(error.code, { 'Content-type': 'text/html' });
response.writeHead(err.code, { 'Content-type': 'text/html' });
const html: string[] = [];
// response.statusMessage will provide standard message for status
// code so much set response status code before creating html
html.push(
'<html>',
'<head>',
`<title>${error.code} ${response.statusMessage}</title>`,
`<title>${err.code} ${response.statusMessage}</title>`,
'</head>',
'<body>',
`<h1>${error.code} ${response.statusMessage}</h1>`,
`<h1>${err.code} ${response.statusMessage}</h1>`,
'<ul>',
`<li>Code: ${error.message}</li>`,
`<li>Message: ${error.description}</li>`,
`<li>Code: ${err.message}</li>`,
`<li>Message: ${err.description}</li>`,
);
if (!userErrorPageFailure && bucketName) {
@@ -763,8 +758,8 @@ export function errorHtmlResponse(
'to Retrieve a Custom ',
'Error Document</h3>',
'<ul>',
`<li>Code: ${error.message}</li>`,
`<li>Message: ${error.description}</li>`,
`<li>Code: ${err.message}</li>`,
`<li>Message: ${err.description}</li>`,
'</ul>',
);
}
@@ -775,6 +770,8 @@ export function errorHtmlResponse(
);
return response.end(html.join(''), 'utf8', () => {
// TODO ARSN-216 Fix logger
// @ts-expect-error
log.end().info('responded with error html', {
httpCode: response.statusCode,
});
@@ -788,24 +785,20 @@ export function errorHtmlResponse(
* @param log - Werelogs logger
*/
export function errorHeaderResponse(
err: ArsenalError | Error,
err: ArsenalError,
response: http.ServerResponse,
corsHeaders: { [key: string]: string },
log: RequestLogger,
) {
let error: ArsenalError;
if (err instanceof ArsenalError) {
error = err;
} else {
error = errors.InternalError.customizeDescription(err.message);
}
log.trace('sending error header response',
{ error });
{ err });
setCommonResponseHeaders(corsHeaders, response, log);
response.setHeader('x-amz-error-code', error.message);
response.setHeader('x-amz-error-message', error.description);
response.writeHead(error.code);
response.setHeader('x-amz-error-code', err.message);
response.setHeader('x-amz-error-message', err.description);
response.writeHead(err.code);
return response.end(() => {
// TODO ARSN-216 Fix logger
// @ts-expect-error
log.end().info('responded with error headers', {
httpCode: response.statusCode,
});
@@ -889,6 +882,8 @@ export function redirectRequest(
// remove hanging slash
redirectLocation = redirectLocation.slice(0, -1);
}
// TODO ARSN-216 Fix logger
// @ts-expect-error
log.end().info('redirecting request', {
httpCode: redirectCode,
redirectLocation: hostName,
@@ -916,7 +911,7 @@ export function redirectRequest(
* @param log - Werelogs instance
*/
export function redirectRequestOnError(
err: ArsenalError | Error,
err: ArsenalError,
method: 'HEAD' | 'GET',
routingInfo: {
withError: true;
@@ -930,24 +925,17 @@ export function redirectRequestOnError(
) {
response.setHeader('Location', routingInfo.location);
let error: ArsenalError;
if (err instanceof ArsenalError) {
error = err;
} else {
error = errors.InternalError.customizeDescription(err.message);
}
if (!dataLocations && error.is.Found) {
if (!dataLocations && err.is.Found) {
if (method === 'HEAD') {
return errorHeaderResponse(error, response, corsHeaders, log);
return errorHeaderResponse(err, response, corsHeaders, log);
}
response.setHeader('x-amz-error-code', error.message);
response.setHeader('x-amz-error-message', error.description);
return errorHtmlResponse(error, false, '', response, corsHeaders, log);
response.setHeader('x-amz-error-code', err.message);
response.setHeader('x-amz-error-message', err.description);
return errorHtmlResponse(err, false, '', response, corsHeaders, log);
}
// This is reached only for website error document (GET only)
const overrideErrorCode = error.flatten();
const overrideErrorCode = err.flatten();
overrideErrorCode.code = 301;
return streamUserErrorPage(ArsenalError.unflatten(overrideErrorCode)!,
dataLocations || [], retrieveDataParams, response, corsHeaders, log);
@@ -1234,8 +1222,8 @@ export function parseContentMD5(headers: http.IncomingHttpHeaders) {
* @param err - Arsenal error
* @param statsClient - StatsClient instance
*/
export function statsReport500(err?: ArsenalError | Error | null, statsClient?: StatsClient | null) {
if (statsClient && err instanceof ArsenalError && err?.code === 500) {
export function statsReport500(err?: ArsenalError | null, statsClient?: StatsClient | null) {
if (statsClient && err && err.code === 500) {
statsClient.report500('s3');
}
return undefined;

View File

@@ -14,7 +14,7 @@ const GcpClient = require('./external/GcpClient');
const PfsClient = require('./external/PfsClient');
const backendUtils = require('./external/utils');
function parseLC(config, vault, metadata) {
function parseLC(config, vault) {
const clients = {};
Object.keys(config.locationConstraints).forEach(location => {
@@ -27,7 +27,7 @@ function parseLC(config, vault, metadata) {
}
if (locationObj.type === 'vitastor') {
const VitastorBackend = require('./vitastor/VitastorBackend');
clients[location] = new VitastorBackend(location, locationObj.details, metadata);
clients[location] = new VitastorBackend(location, locationObj.details);
}
if (locationObj.type === 'scality') {
if (locationObj.details.connector.sproxyd) {

View File

@@ -1,4 +1,4 @@
const { v4: uuid } = require('uuid');
const uuid = require('uuid/v4');
const errors = require('../../../../../errors').default;
const { createMpuKey, logger, getPutTagsMetadata } = require('../GcpUtils');
const { logHelper } = require('../../utils');

View File

@@ -1,89 +0,0 @@
// Zenko CloudServer Vitastor data storage backend adapter
// Copyright (c) Vitaliy Filippov, 2019+
// License: VNPL-1.1 (see README.md for details)
const assert = require('assert');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
import { MongoClient, Db } from 'mongodb';
import { MongoKVWrapper } from './KVWrapper';
const mongoserver = new MongoMemoryReplSet({
debug: false,
instanceOpts: [
{ port: 27021 },
],
replSet: {
name: 'customSetName',
count: 1,
dbName: 'mongokvtest',
storageEngine: 'ephemeralForTest',
},
});
let kv: MongoKVWrapper|null = null;
beforeAll(async () =>
{
await mongoserver.start();
await mongoserver.waitUntilRunning();
kv = new MongoKVWrapper('mongodb://127.0.0.1:27021/?w=majority&readPreference=primary&replicaSet=customSetName',
'mongokvtest', 'volumes');
await kv.open();
});
afterAll(async () =>
{
if (kv)
await kv.close();
await mongoserver.stop();
});
test('MongoKVWrapper', async () =>
{
if (!kv)
throw new Error('not connected');
// insert a number and check that the type is preserved
assert.strictEqual(await kv.get('id'), null);
assert.strictEqual(await kv.update('id', null, 123), true);
assert.strictEqual(await kv.get('id'), 123);
// vol_1_1 not exists
assert.strictEqual(await kv.get('vol_1_1'), null);
const hdr = { created: Date.now(), bucket: 'helloworld' };
// update non-existing key - should fail
assert.strictEqual(await kv.update('vol_1_1', {}, hdr), false);
// key is not affected by a failed operation
assert.strictEqual(await kv.get('vol_1_1'), null);
// create key
assert.strictEqual(await kv.update('vol_1_1', null, hdr), true);
assert.deepStrictEqual(await kv.get('vol_1_1'), hdr);
// try to create a duplicate key - should fail
assert.strictEqual(await kv.update('vol_1_1', null, hdr), false);
// key is not affected by a failed operation
assert.deepStrictEqual(await kv.get('vol_1_1'), hdr);
// update key
const hdr2 = { created: Date.now(), bucket: 'helloworld', deleted: Date.now() };
assert.strictEqual(await kv.update('vol_1_1', hdr, hdr2), true);
assert.deepStrictEqual(await kv.get('vol_1_1'), hdr2);
// try to update again from the same old value - should fail
assert.strictEqual(await kv.update('vol_1_1', hdr, hdr2), false);
// key is not affected by a failed operation
assert.deepStrictEqual(await kv.get('vol_1_1'), hdr2);
// create 2 more keys
assert.strictEqual(await kv.update('vol_1_3', null, { abc: 'def' }), true);
assert.strictEqual(await kv.update('vol_1_2', null, { def: 'xyz' }), true);
// check listing
let lst: any[] = [];
for await (const item of kv.list('vol_'))
lst.push(item);
assert.deepStrictEqual(lst, [ [ 'vol_1_1', hdr2 ], [ 'vol_1_2', { def: 'xyz' } ], [ 'vol_1_3', { 'abc': 'def' } ] ]);
lst = [];
for await (const item of kv.list('vol_', { def: 'xyz' }))
lst.push(item);
assert.deepStrictEqual(lst, [ [ 'vol_1_2', { def: 'xyz' } ] ]);
// delete key
assert.strictEqual(await kv.update('vol_1_1', hdr2, null), true);
assert.deepStrictEqual(await kv.get('vol_1_1'), null);
});

View File

@@ -1,262 +0,0 @@
import { MongoClient, Db, Collection, MongoServerError } from 'mongodb';
interface MongoKV {
_id: string;
value: any;
}
// Key/value adapter that stores values in a MongoDB collection, one
// document per key ({ _id: key, value }). Provides get / compare-and-swap
// update / prefix listing with the same interface as VitastorKVWrapper.
export class MongoKVWrapper
{
    client: MongoClient | null = null;
    db: Db | null = null;
    db_name: string;
    url: string;
    collection_name: string;
    collection: Collection<MongoKV> | null = null;
    options: any;            // passed through to MongoClient.connect
    opened: boolean = false; // set once the first open() attempt finished (even on failure)
    // Wakeup callbacks of callers that invoked open() while a connection
    // attempt was already in flight; null when no attempt is in progress.
    on_open: ((...args: any[]) => void)[] | null = null;
    open_error: any;         // sticky error from the connection attempt
    constructor(url: string, db_name: string, collection_name: string, options?: any)
    {
        this.url = url;
        this.db_name = db_name;
        this.collection_name = collection_name;
        this.options = options;
    }
    // Connect and create the collection. Concurrent callers are queued on
    // on_open so only one connection attempt runs; all of them resolve (or
    // throw the same error) when it finishes.
    async open()
    {
        if (!this.collection)
        {
            if (this.on_open)
            {
                // Another open() is in progress - wait for it to finish.
                await new Promise(ok => this.on_open!.push(ok));
            }
            else
            {
                this.on_open = [];
                try
                {
                    this.client = await MongoClient.connect(this.url, this.options);
                    // ignoreUndefined: do not persist fields whose value is undefined
                    this.db = this.client.db(this.db_name, { ignoreUndefined: true });
                    // NOTE(review): createCollection may throw "NamespaceExists" on
                    // some server/driver versions if the collection already exists
                    // - confirm against the pinned driver.
                    await this.db.createCollection(this.collection_name);
                    this.collection = this.db.collection<MongoKV>(this.collection_name);
                }
                catch (e)
                {
                    if (this.client)
                    {
                        this.client.close().catch(console.error);
                        this.client = null;
                    }
                    this.db = null;
                    // NOTE(review): open_error is never cleared, so a later retry
                    // would still throw this stale error below - confirm intended.
                    this.open_error = e;
                }
                this.opened = true;
                // Wake up everyone who queued while we were connecting.
                this.on_open.map(cb => setImmediate(cb));
                this.on_open = null;
            }
        }
        if (this.open_error)
        {
            throw this.open_error;
        }
    }
    async close()
    {
        if (this.collection)
        {
            this.collection = null;
        }
        if (this.client)
        {
            await this.client!.close();
            this.client = null;
        }
    }
    // Return the stored value for the key, or null when the key is absent.
    async get(key: string)
    {
        const doc = await this.collection!.findOne({ _id: key });
        return doc ? doc.value : null;
    }
    // Atomic compare-and-swap:
    //   old_value === null -> insert (returns false if the key already exists)
    //   value === null     -> delete if the current value equals old_value
    //   otherwise          -> replace if the current value equals old_value
    // Returns true on success, false when the precondition did not hold.
    async update(key: string, old_value: any, value: any): Promise<boolean>
    {
        if (old_value === null)
        {
            try
            {
                const res = await this.collection!.insertOne({ _id: key, value: value });
                if (res.insertedId !== key)
                    throw new Error('mongodb: insertOne insertedId='+res.insertedId+', but should be '+key);
            }
            catch (e)
            {
                // 11000 = duplicate key error -> the key already exists
                if ((e instanceof MongoServerError) && e.code == 11000)
                    return false;
                throw e;
            }
            return true;
        }
        else if (value !== null)
        {
            const doc = await this.collection!.findOneAndUpdate(
                { _id: key, value: old_value },
                { '$set': { value: value } },
            );
            // NOTE(review): relies on driver versions where findOneAndUpdate
            // resolves to a ModifyResult ({ value }) - driver v6 returns the
            // document directly; confirm against the pinned driver.
            return !!doc.value;
        }
        else
        {
            const res = await this.collection!.deleteOne({ _id: key, value: old_value });
            return res.deletedCount > 0;
        }
    }
    // Iterate [key, value] pairs with key >= start_key; the optional filter
    // matches value sub-fields by exact equality.
    async* list(start_key: string, filter?: object)
    {
        const mongo_filter = { _id: { '$gte': start_key } };
        if (filter)
        {
            for (const key in filter)
            {
                mongo_filter["value."+key] = { '$eq': filter[key] };
            }
        }
        for await (const item of this.collection!.find(mongo_filter))
        {
            yield [ item._id, item.value ];
        }
    }
}
// Key/value adapter over the native vitastor KV store. Values are JSON
// documents stored under string keys; the interface mirrors
// MongoKVWrapper (get / compare-and-swap update / prefix listing).
export class VitastorKVWrapper
{
    config: any;   // metadata_image OR metadata_pool_id+metadata_inode_num, plus vitastor options
    kv: any;       // native vitastor.KV instance
    cli: any;      // native vitastor Client
    vitastor: any; // the vitastor addon module itself (constants and classes)
    opened: boolean = false;
    // Wakeup callbacks of callers that invoked open() while another open
    // was already in flight; null when no attempt is in progress.
    on_open: ((...args: any[]) => void)[] | null = null;
    open_error: any; // sticky error from the open attempt
    constructor(config, cli, vitastor)
    {
        this.config = config;
        this.cli = cli;
        this.vitastor = vitastor;
        this.kv = new vitastor.KV(this.cli);
    }
    // Resolve the metadata image (when configured by name) and open the KV
    // store. Concurrent callers are queued so only one attempt runs; all of
    // them resolve, or throw the same sticky error, when it finishes.
    async open()
    {
        if (!this.opened)
        {
            if (this.on_open)
            {
                // Another open() is in progress - wait for it to finish.
                await new Promise(ok => this.on_open!.push(ok));
            }
            else
            {
                this.on_open = [];
                try
                {
                    if (this.config.metadata_image)
                    {
                        // Translate image name -> (pool_id, inode_num).
                        const img = new this.vitastor.Image(this.cli, this.config.metadata_image);
                        const info = await new Promise<{ pool_id: number, inode_num: number }>(ok => img.get_info(ok));
                        if (!info.inode_num)
                            throw new Error('vitastorkv metadata image '+this.config.metadata_image+' does not exist');
                        this.config.metadata_pool_id = info.pool_id;
                        this.config.metadata_inode_num = info.inode_num;
                    }
                    await new Promise<void>((ok, no) => this.kv.open(
                        this.config.metadata_pool_id, this.config.metadata_inode_num,
                        this.config.vitastor || {}, err => (err ? no(new Error(err)) : ok())
                    ));
                }
                catch (e)
                {
                    // NOTE(review): open_error is never cleared, so every later
                    // open() call rethrows the first failure - confirm intended.
                    this.open_error = e;
                }
                this.opened = true;
                // Wake up everyone who queued while we were opening.
                this.on_open.map(cb => setImmediate(cb));
                this.on_open = null;
            }
        }
        if (this.open_error)
        {
            throw this.open_error;
        }
    }
    async close()
    {
        if (this.opened && !this.open_error)
        {
            await new Promise<void>((ok, no) => this.kv.close(err => (err ? no(err) : ok())));
            this.opened = false;
        }
    }
    // Return the JSON-decoded value for the key, or null when absent.
    async get(key: string)
    {
        const [ err, prev ] = await new Promise<[ any, string ]>(ok => this.kv.get(key, (err, value) => ok([ err, value ])));
        if (err == this.vitastor.ENOENT)
            return null;
        else if (err)
            throw new Error('vitastorkv get: error '+err);
        return JSON.parse(prev);
    }
    // Atomic compare-and-swap:
    //   old_value === null -> insert (returns false if the key already exists)
    //   value === null     -> delete if the current value equals old_value
    //   otherwise          -> replace if the current value equals old_value
    // Equality is checked on the JSON serialization; EINTR from the native
    // layer means the precondition failed -> returns false.
    async update(key: string, old_value: any, value: any): Promise<boolean>
    {
        const cas = (old_value !== null ? (cas_old => cas_old === JSON.stringify(old_value)) : (cas_old => !cas_old));
        const err = await new Promise(ok => (value !== null
            ? this.kv.set(key, JSON.stringify(value), ok, cas)
            : this.kv.del(key, ok, cas)));
        if (err === this.vitastor.EINTR)
            return false;
        else if (err)
            throw new Error((value !== null ? 'vitastorkv set: error ' : 'vitastorkv del: error ')+err);
        return true;
    }
    // Iterate [key, value] pairs with key >= start_key; the optional filter
    // keeps only entries whose decoded value matches every field (loose ==).
    // The native listing handle is always closed, even on early exit.
    async* list(start_key: string, filter?: object)
    {
        const lst = this.kv.list(start_key);
        try
        {
            next_key: while (true)
            {
                const [ err, key, value ] = await new Promise<[ number, string, string ]>(ok => lst.next((e, k, v) => ok([ e, k, v ])));
                if (err)
                {
                    // ENOENT marks the normal end of the listing.
                    if (err != this.vitastor.ENOENT)
                        throw new Error('Error listing: '+err);
                    break;
                }
                const decoded = JSON.parse(value);
                if (filter)
                {
                    for (const k in filter)
                    {
                        if (decoded[k] != filter[k])
                        {
                            continue next_key;
                        }
                    }
                }
                yield [ key, decoded ];
            }
        }
        finally
        {
            lst.close();
        }
    }
}

View File

@@ -1,153 +0,0 @@
// Zenko CloudServer Vitastor data storage backend adapter
// Copyright (c) Vitaliy Filippov, 2019+
// License: VNPL-1.1 (see README.md for details)
const assert = require('assert');
const crypto = require('crypto');
const { Readable } = require('stream');
const VitastorBackend = require('./VitastorBackend');
const VitastorMock = require('./VitastorMock');
test('basic read and write', async () =>
{
const origNow = Date.now;
const startTs = Date.now();
const metastub = {};
const backend = new VitastorBackend('standard', {
pool_id: 1,
metadata_pool_id: 2,
metadata_inode_num: 1,
read_chunk_size: 128*1024,
write_chunk_size: 128*1024,
open_volume_limit: 0,
open_volume_max_unused_sec: 30,
open_volume_max_garbage: 0.25,
closed_volume_max_garbage: 0.25,
volume_lock_timeout_sec: 10,
volume_lock_interval_sec: 5,
min_volume_size: 128*1024,
defrag_interval_sec: 600,
defrag_cleanup_age_sec: 20,
}, metastub, VitastorMock);
// test put
const size = Math.floor(128*1024+Math.random()*128*1024);
const putData1 = crypto.randomBytes(size);
const getInfo1 = await new Promise((ok, no) => backend.put(new MockReadStream(putData1), size,
{ bucketName: 'testBucket', objectKey: 'abcd/efg', partNumber: 2 },
'test:1', (err, res) => err ? no(err) : ok(res)));
console.log('object written:', getInfo1);
// test get
const readStream1 = await new Promise((ok, no) => backend.get({ key: getInfo1 }, null,
'test:2', (err, res) => err ? no(err) : ok(res)));
const getData1 = await readAll(readStream1);
if (Buffer.compare(getData1, putData1) != 0)
throw new Error('data differs');
console.log('object read back OK');
// test del
await new Promise((ok, no) => backend.delete({ key: getInfo1 },
'test:3', (err, res) => err ? no(err) : ok(res)));
if (!backend.cli.inodes[1] ||
!backend.cli.inodes[1][1] ||
!backend.cli.inodes[1][1].data)
throw new Error('inode 1/1 is missing');
const volData1 = backend.cli.inodes[1][1].data;
console.log('object deleted OK:',
volData1.slice(getInfo1.offset, getInfo1.offset+16).toString('hex'),
volData1.slice(getInfo1.offset+16, getInfo1.offset+getInfo1.hdrlen).toString(),
);
if (volData1.slice(getInfo1.offset, getInfo1.offset+8).toString() != VitastorBackend.OBJECT_MAGIC)
throw new Error('invalid header magic');
if (volData1.readUInt32LE(getInfo1.offset+8) != VitastorBackend.FLAG_DELETED)
throw new Error('invalid header flags');
if (volData1.readUInt32LE(getInfo1.offset+12) != (getInfo1.hdrlen-16))
throw new Error('invalid header json length');
// test deletion statistics
clearTimeout(backend.volume_stats_timer_id);
await backend._writeVolumeStats();
const volHeader = JSON.parse(backend.kv.kv.data['vol_1_1']);
assert(volHeader.removed_objects == 1);
assert(volHeader.removed_bytes == size);
console.log('deletion statistics written ok');
// one more put
const size2 = Math.floor(128*1024+Math.random()*128*1024);
const putData2 = crypto.randomBytes(size);
const getInfo2 = await new Promise((ok, no) => backend.put(new MockReadStream(putData2), size,
{ bucketName: 'testBucket', objectKey: 'hello' },
'test:4', (err, res) => err ? no(err) : ok(res)));
console.log('object written:', getInfo2);
// test closing unused volumes
Date.now = () => startTs+45000;
clearTimeout(backend.bump_timer_id);
await backend._bumpVolumes();
assert.deepEqual(backend.volumes, { testBucket: {} });
assert.deepEqual(backend.volumes_by_id, { '1': {} });
console.log('unused volume closed');
// test defrag
metastub.replaceDataLocations = (bucket, location, replacements, cb) => cb(null, 1);
await backend._autoDefrag();
console.log(backend.kv.kv.data);
assert.equal(JSON.parse(backend.kv.kv.data['vol_1_1']).defrag_ts, Date.now());
assert.equal(Object.keys(backend.volumes['testBucket']).length, 1);
console.log('basic defrag ok');
// test purging volume data
Date.now = () => startTs+70000;
await backend._autoDefrag();
assert(backend.cli.inodes[1][1].data.length == 0);
assert(backend.kv.kv.data['vol_1_1'] === undefined);
console.log('volume data purged ok');
// stop timers and so on
backend.destroy();
Date.now = origNow;
});
// Drain a readable stream completely and resolve with its full contents
// concatenated into a single Buffer; rejects if the stream errors.
function readAll(readable) {
    return new Promise((resolve, reject) => {
        const parts = [];
        readable.on('error', reject);
        readable.on('data', (part) => parts.push(part));
        readable.on('end', () => resolve(Buffer.concat(parts)));
    });
}
// Readable stream over a fixed Buffer, emitting at most chunk_size bytes
// per _read call - used to feed deterministic payloads into backend.put().
class MockReadStream extends Readable
{
    constructor(buffer, chunk_size, stream_options = undefined)
    {
        super(stream_options);
        this.buffer = buffer;                  // full payload to stream out
        this.chunk_size = chunk_size || 16384; // max bytes pushed per _read
        this.pos = 0;                          // next unread offset in buffer
        this._reading = false;                 // NOTE(review): assigned but never read - appears unused
    }
    // Readable._read implementation: push the next slice, or signal EOF.
    _read(n)
    {
        if (this.pos >= this.buffer.length)
        {
            // EOF
            this.push(null);
            return;
        }
        // Honour the requested size n only when it is smaller than chunk_size.
        const data = this.buffer.slice(this.pos, this.pos + (n && n < this.chunk_size ? n : this.chunk_size));
        this.pos += data.length;
        this.push(data);
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,399 +0,0 @@
// Zenko CloudServer Vitastor data storage backend adapter
// Copyright (c) Vitaliy Filippov, 2019+
// License: VNPL-1.1 (see README.md for details)
const ENOENT = -2;
const EINTR = -4;
const EINVAL = -22;
const IMMEDIATE_NONE = 0;
const IMMEDIATE_SMALL = 1;
const IMMEDIATE_ALL = 2;
// Mock of the vitastor Client API: an in-memory block store keyed by
// (pool_id, inode_id). Each inode is a flat data Buffer plus one CAS
// version counter per max_atomic_write_size-aligned block. The optional
// on_read/on_write/on_delete/on_sync hooks let tests observe calls or
// inject an error code before the in-memory operation executes.
class Client
{
    min_io_size: number = 4096;
    max_atomic_write_size: number = 131072;
    immediate_commit: boolean = true;
    // pool_id -> inode_id -> { data, per-block version counters }
    inodes: { [pool_id: string]: { [inode_id: string]: { data: Buffer, versions: bigint[] } } } = {};
    // Test hooks: a non-zero first callback argument aborts the operation
    // with that error code; 0 lets the in-memory implementation proceed.
    on_read: null | ((pool_id: number, inode_id: number, offset: number, length: number, callback: (number) => void) => void) = null;
    on_write: null | ((pool_id: number, inode_id: number, offset: number,
        data: Buffer|Buffer[], cas: { version: number|bigint }|null, callback: (number) => void) => void) = null;
    on_delete: null | ((pool_id: number, inode_id: number, offset: number,
        length: number, cas: { version: number|bigint }|null, callback: (number) => void) => void) = null;
    on_sync: null | ((callback: (number) => void) => void) = null;
    get_min_io_size(pool_id: number)
    {
        return this.min_io_size;
    }
    get_max_atomic_write_size(pool_id: number)
    {
        return this.max_atomic_write_size;
    }
    get_immediate_commit(pool_id: number)
    {
        return this.immediate_commit;
    }
    // Asynchronous read; routed through the on_read hook when installed.
    read(pool_id: number, inode_id: number, offset: number|bigint, length: number|bigint,
        callback: (err: number|null, buf: Buffer|null, version: bigint|null) => void)
    {
        if (!this.on_read)
        {
            setImmediate(() => this._read(pool_id, inode_id, Number(offset), Number(length), callback));
        }
        else
        {
            this.on_read(pool_id, inode_id, Number(offset), Number(length), (err) =>
            {
                if (err != 0)
                    callback(err, null, null);
                else
                    this._read(pool_id, inode_id, offset, length, callback);
            });
        }
    }
    // Copy [offset, offset+length) out of the inode buffer (zero-filled past
    // EOF or for unknown inodes). The CAS version is only reported when the
    // read fits inside a single atomic block; otherwise 0n is returned.
    _read(pool_id: number, inode_id: number, offset: number|bigint, length: number|bigint,
        callback: (err: number|null, buf: Buffer|null, version: bigint|null) => void)
    {
        const noffset = Number(offset);
        const nlength = Number(length);
        const ino = this.inodes[pool_id]?.[inode_id];
        const buf = Buffer.alloc(nlength);
        let vers: bigint = 0n;
        if (ino)
        {
            if (Math.floor((noffset + nlength - 1) / this.max_atomic_write_size) ==
                Math.floor(noffset / this.max_atomic_write_size))
            {
                vers = ino.versions[Math.floor(noffset / this.max_atomic_write_size)] || 0n;
            }
            if (ino.data.length > noffset)
            {
                ino.data.copy(buf, 0, noffset, noffset+nlength < ino.data.length ? noffset+nlength : ino.data.length);
            }
        }
        callback(null, buf, vers);
    }
    // Asynchronous write; `options` may be a CAS descriptor ({ version }) or
    // the callback itself. Routed through the on_write hook when installed.
    write(pool_id: number, inode_id: number, offset: number|bigint, data: Buffer|Buffer[],
        options: { version: number|bigint } | ((err: number|null) => void) | null,
        callback?: (err: number|null) => void)
    {
        const cas = options instanceof Function ? null : options;
        callback = options instanceof Function ? options : callback;
        if (!callback)
        {
            throw new Error('callback is required');
        }
        if (!this.on_write)
        {
            setImmediate(() => this._write(pool_id, inode_id, offset, data, cas, callback!));
        }
        else
        {
            this.on_write(pool_id, inode_id, Number(offset), data, cas, (err) =>
            {
                if (err != 0)
                    callback!(err);
                else
                    this._write(pool_id, inode_id, offset, data, cas, callback!);
            });
        }
    }
    // Write data into the inode buffer, growing it as needed, and bump the
    // version counter of every touched block. A versioned (CAS) write must
    // stay within one atomic block (EINVAL otherwise), and its version must
    // be exactly the current block version + 1 (EINTR on mismatch).
    _write(pool_id: number, inode_id: number, offset: number|bigint, data: Buffer|Buffer[],
        cas: { version: number|bigint } | null,
        callback: (err: number|null) => void)
    {
        const adata: Buffer[] = (data instanceof Buffer ? [ data ] : data);
        const length = adata.reduce((a, c) => a + c.length, 0);
        const first_block = Math.floor(Number(offset) / this.max_atomic_write_size);
        const last_block = Math.floor((Number(offset) + length - 1) / this.max_atomic_write_size);
        if (cas && cas.version && first_block != last_block)
        {
            callback!(EINVAL);
            return;
        }
        if (!this.inodes[pool_id])
        {
            this.inodes[pool_id] = {};
        }
        if (!this.inodes[pool_id][inode_id])
        {
            this.inodes[pool_id][inode_id] = {
                data: Buffer.alloc(this.max_atomic_write_size < length ? length : this.max_atomic_write_size),
                versions: [],
            };
        }
        const ino = this.inodes[pool_id][inode_id];
        if (cas && cas.version && BigInt(cas.version)-1n != (ino.versions[first_block] || 0n))
        {
            callback!(EINTR);
            return;
        }
        for (let i = first_block; i <= last_block; i++)
        {
            ino.versions[i] = (ino.versions[i] || 0n) + 1n;
        }
        let coff = Number(offset);
        if (ino.data.length < coff+length)
        {
            // Grow geometrically (at least double) to amortize repeated appends.
            const nd = Buffer.alloc(ino.data.length*2 < coff+length ? coff+length : ino.data.length*2);
            ino.data.copy(nd);
            ino.data = nd;
        }
        for (const buf of adata)
        {
            buf.copy(ino.data, coff);
            coff += buf.length;
        }
        callback!(0);
    }
    // Asynchronous delete (trim); same options/CAS handling as write().
    delete(pool_id: number, inode_id: number, offset: number|bigint, length: number|bigint,
        options: { version: number|bigint } | ((err: number|null) => void) | null,
        callback?: (err: number|null) => void)
    {
        const cas = options instanceof Function ? null : options;
        callback = options instanceof Function ? options : callback;
        if (!callback)
        {
            throw new Error('callback is required');
        }
        if (!this.on_delete)
        {
            setImmediate(() => this._delete(pool_id, inode_id, offset, length, cas, callback!));
        }
        else
        {
            this.on_delete(pool_id, inode_id, Number(offset), Number(length), cas, (err) =>
            {
                if (err != 0)
                    callback!(err);
                else
                    this._delete(pool_id, inode_id, offset, length, cas, callback!);
            });
        }
    }
    // Drop the version counters of the covered blocks, then either truncate
    // the buffer (when the range reaches its end) or zero-fill the range.
    _delete(pool_id: number, inode_id: number, offset: number|bigint, length: number|bigint,
        cas: { version: number|bigint } | null,
        callback: (err: number|null) => void)
    {
        const first_block = Math.floor(Number(offset) / this.max_atomic_write_size);
        const last_block = Math.floor((Number(offset) + Number(length) - 1) / this.max_atomic_write_size);
        if (cas && cas.version && first_block != last_block)
        {
            callback!(EINVAL);
            return;
        }
        if (!this.inodes[pool_id])
        {
            this.inodes[pool_id] = {};
        }
        if (!this.inodes[pool_id][inode_id])
        {
            this.inodes[pool_id][inode_id] = {
                data: Buffer.alloc(this.max_atomic_write_size < Number(length) ? Number(length) : this.max_atomic_write_size),
                versions: [],
            };
        }
        const ino = this.inodes[pool_id][inode_id];
        if (cas && cas.version && BigInt(cas.version)-1n != (ino.versions[first_block] || 0n))
        {
            callback!(EINTR);
            return;
        }
        for (let i = first_block; i <= last_block; i++)
        {
            delete ino.versions[i];
        }
        if (ino.data.length <= (last_block+1)*this.max_atomic_write_size)
        {
            ino.data = ino.data.slice(0, first_block*this.max_atomic_write_size);
        }
        else
        {
            ino.data.fill(0, first_block*this.max_atomic_write_size, (last_block+1)*this.max_atomic_write_size);
        }
        callback!(0);
    }
    // Asynchronous sync; the mock keeps nothing dirty, so _sync just succeeds.
    sync(callback: (err: number|null) => void)
    {
        if (!this.on_sync)
        {
            setImmediate(() => this._sync(callback));
        }
        else
        {
            this.on_sync((err) =>
            {
                if (err != 0)
                    callback(err);
                else
                    this._sync(callback);
            });
        }
    }
    _sync(callback: (err: number|null) => void)
    {
        callback(0);
    }
}
// In-memory mock of the vitastor KV store. Values live in a plain object;
// a sorted snapshot of the keys is built lazily in `this.keys` for
// listings and must be invalidated whenever the key set changes.
class KV
{
    data: { [key: string]: string } = {};
    // Lazily built, sorted key snapshot shared with KVListing; null means
    // "stale, rebuild on the next listing step".
    keys: string[] | null = null;
    size: number = 0;
    // open/close are no-ops in the mock; callbacks fire asynchronously to
    // mimic the real client.
    open(pool_id: number, inode_id: number, params: { [key: string]: string }, callback: (err: number|null) => void)
    {
        setImmediate(() => callback(null));
    }
    close(callback: () => void)
    {
        setImmediate(() => callback());
    }
    get_size()
    {
        return this.size;
    }
    // Read a key; reports ENOENT when it does not exist.
    get(key: string, callback: (err: number|null, value: string|null) => void)
    {
        const cur = this.data[key];
        setImmediate(() => callback(cur === undefined ? ENOENT : null, cur === undefined ? null : cur));
    }
    get_cached(key: string, callback: (err: number|null, value: string|null) => void)
    {
        return this.get(key, callback);
    }
    // Write a key, optionally guarded by a compare-and-swap predicate which
    // receives the current value (null when the key does not exist) and must
    // return true for the write to proceed; otherwise the write fails with EINTR.
    set(key: string, value: string, callback: (err: number|null) => void, cas_compare?: (old_value: string|null) => boolean)
    {
        if (cas_compare)
        {
            const cur = this.data[key];
            if (!cas_compare(cur === undefined ? null : cur))
            {
                setImmediate(() => callback(EINTR));
                return;
            }
        }
        // BUG FIX: the "new key" check must happen BEFORE the assignment.
        // Previously `key in this.data` was tested after `this.data[key] = value`,
        // so it was always true, the cache was never invalidated on insert,
        // and listings missed newly created keys.
        if (!(key in this.data))
        {
            this.keys = null;
        }
        this.data[key] = value;
        setImmediate(() => callback(null));
    }
    // Delete a key with the same optional CAS guard as set().
    del(key: string, callback: (err: number|null) => void, cas_compare?: (old_value: string|null) => boolean)
    {
        if (cas_compare)
        {
            const cur = this.data[key];
            if (!cas_compare(cur === undefined ? null : cur))
            {
                setImmediate(() => callback(EINTR));
                return;
            }
        }
        delete this.data[key];
        this.keys = null;
        setImmediate(() => callback(null));
    }
    // Begin a listing of keys >= start_key.
    list(start_key: string)
    {
        return new KVListing(this, start_key);
    }
}
class KVListing
{
    // Ordered iterator over a KV store's keys, starting at a given key.
    kv: KV;
    next_key: string;
    // true: next result may equal next_key; false: strictly greater.
    ge: boolean = true;
    // Snapshot of the store's sorted keys; rebuilt when the store changes.
    keys: string[] | null = null;
    // Index of the next key to emit; -1 once the listing is exhausted.
    pos: number = 0;
    constructor(kv: KV, start_key: string)
    {
        this.kv = kv;
        this.next_key = start_key;
    }
    /**
     * Emit the next key/value pair in ascending key order, starting at
     * start_key (inclusive on the first call, strictly after the last
     * returned key on subsequent calls). Calls back with ENOENT when the
     * listing is exhausted.
     */
    next(callback: (err: number|null, key: string|null, value: string|null) => void)
    {
        if (this.pos < 0)
        {
            // Listing finished on an earlier call.
            setImmediate(() => callback(ENOENT, null, null));
            return;
        }
        if (!this.keys || this.kv.keys != this.keys)
        {
            // First call, or the store was modified since the last call:
            // rebuild the sorted-key snapshot and reposition at next_key.
            if (!this.kv.keys)
                this.kv.keys = Object.keys(this.kv.data).sort();
            this.keys = this.kv.keys;
            this.pos = 0;
            if (this.next_key != '')
            {
                // BUGFIX: binary search for the first key >= next_key
                // (ge) or > next_key (!ge). The previous implementation
                // compared in the wrong direction (`start = mid` when
                // next_key < keys[mid]) and started past the correct key.
                let low = 0, high = this.keys.length;
                while (low < high)
                {
                    const mid = (low + high) >>> 1;
                    if (this.keys[mid] < this.next_key ||
                        (!this.ge && this.keys[mid] == this.next_key))
                        low = mid+1;
                    else
                        high = mid;
                }
                this.pos = low;
            }
        }
        if (this.pos < this.keys.length)
        {
            const key = this.keys[this.pos];
            const value = this.kv.data[key];
            this.pos++;
            // Remember where we are so a snapshot rebuild resumes
            // strictly after this key.
            this.next_key = key;
            this.ge = false;
            setImmediate(() => callback(null, key, value));
        }
        else
        {
            this.pos = -1;
            setImmediate(() => callback(ENOENT, null, null));
        }
    }
    close()
    {
        // Nothing to release for the in-memory mock.
    }
}
module.exports = {
Client,
KV,
ENOENT,
EINTR,
EINVAL,
IMMEDIATE_NONE,
IMMEDIATE_SMALL,
IMMEDIATE_ALL,
};

View File

@@ -43,7 +43,7 @@ function _parseListEntries(entries) {
Initiated: tmp.initiated,
Initiator: tmp.initiator,
EventualStorageBucket: tmp.eventualStorageBucket,
location: tmp.location||tmp.partLocations,
partLocations: tmp.partLocations,
creationDate: tmp.creationDate,
ingestion: tmp.ingestion,
},
@@ -517,13 +517,6 @@ class MetadataWrapper {
});
}
replaceDataLocations(bucketName, dataStoreName, replacements, cb) {
if (typeof this.client.replaceDataLocations !== 'function') {
return cb(errors.NotImplemented);
}
return this.client.replaceDataLocations(bucketName, dataStoreName, replacements, cb);
}
/**
* updates(insert, if missing) an object that matches the given conditions
* @param{string} bucketName -

View File

@@ -3,7 +3,8 @@
const fs = require('fs');
const assert = require('assert');
const uuid = require('uuid');
const { Level } = require('level');
const level = require('level');
const sublevel = require('level-sublevel');
const debug = require('debug')('MetadataFileServer');
const diskusage = require('diskusage');
const werelogs = require('werelogs');
@@ -157,8 +158,8 @@ class MetadataFileServer {
// /metadata namespace
const namespace = `${constants.metadataFileNamespace}/metadata`;
this.logger.info(`creating metadata service at ${namespace}`);
this.baseDb = new Level(`${this.path}/${ROOT_DB}`);
this.rootDb = this.baseDb;
this.baseDb = level(`${this.path}/${ROOT_DB}`);
this.rootDb = sublevel(this.baseDb);
const dbService = new levelNet.LevelDbService({
rootDb: this.rootDb,
namespace,

View File

@@ -2,7 +2,7 @@ const Readable = require('stream').Readable;
const MongoUtils = require('./utils');
class MongoReadStream extends Readable {
constructor(c, options, searchOptions, withLocation) {
constructor(c, options, searchOptions) {
super({
objectMode: true,
highWaterMark: 0,
@@ -85,7 +85,7 @@ class MongoReadStream extends Readable {
Object.assign(query, searchOptions);
}
const projection = withLocation ? undefined : { 'value.location': 0 };
const projection = { 'value.location': 0 };
this._cursor = c.find(query, { projection }).sort({
_id: options.reverse ? -1 : 1,
});

View File

@@ -0,0 +1,242 @@
const errors = require('../../../errors').default;
const {
supportedOperators,
validateConditionsObject,
} = require('../conditions');
const { DbPrefixes, BucketVersioningKeyFormat } = require('../../../versioning/constants').VersioningConstants;
const VID_SEP = require('../../../versioning/constants')
.VersioningConstants.VersionId.Separator;
/**
 * Escape `$` and `.` in property names with their fullwidth lookalikes
 * (U+FF04, U+FF0E) so the object can be stored as MongoDB field names.
 * @param {Object} obj object whose keys may contain `$`/`.`
 * @return {Object} new object with escaped keys, same values
 */
function escape(obj) {
    const out = {};
    for (const [prop, value] of Object.entries(obj)) {
        out[prop.replace(/\$/g, '\uFF04').replace(/\./g, '\uFF0E')] = value;
    }
    return out;
}
/**
 * Reverse of escape(): restore `$` and `.` from their fullwidth
 * placeholders (U+FF04, U+FF0E) in property names.
 * @param {Object} obj object with escaped keys
 * @return {Object} new object with original keys, same values
 */
function unescape(obj) {
    const out = {};
    for (const [prop, value] of Object.entries(obj)) {
        out[prop.replace(/\uFF04/g, '$').replace(/\uFF0E/g, '.')] = value;
    }
    return out;
}
/**
 * Prepare object metadata for storage: tag keys may legally contain `.`
 * and `$`, which MongoDB forbids in field names, so escape them in place.
 * @param {Object} objMD object metadata (mutated)
 * @return {undefined}
 */
function serialize(objMD) {
    if (!objMD.tags) {
        return;
    }
    // eslint-disable-next-line
    objMD.tags = escape(objMD.tags);
}
/**
 * Undo the tag-key escaping applied by serialize(), in place.
 * @param {Object} objMD object metadata read from storage (mutated)
 * @return {undefined}
 */
function unserialize(objMD) {
    if (!objMD.tags) {
        return;
    }
    // eslint-disable-next-line
    objMD.tags = unescape(objMD.tags);
}
/**
 * Build the `user:pass@` prefix for a MongoDB connection URI.
 * Both parts are URI-encoded; returns '' when either is missing.
 * @param {Object} [authCredentials] credentials
 * @param {String} [authCredentials.username] user name
 * @param {String} [authCredentials.password] password
 * @return {String} URI credential prefix, or empty string
 */
function credPrefix(authCredentials) {
    if (!authCredentials || !authCredentials.username || !authCredentials.password) {
        return '';
    }
    const username = encodeURIComponent(authCredentials.username);
    const password = encodeURIComponent(authCredentials.password);
    return `${username}:${password}@`;
}
/**
 * Assign a single condition value under the dotted `prefix` path in the
 * output filter object, after validating it.
 * @param {String} prefix dotted field path (must be non-empty)
 * @param {Object} object filter object being built (mutated)
 * @param {*} cond condition value to assign
 * @return {undefined}
 * @throws {ArsenalError} InternalError on invalid condition or empty prefix
 */
function _assignCondition(prefix, object, cond) {
    // Reject malformed condition values and empty field paths outright.
    if (!validateConditionsObject(cond) || prefix === '') {
        throw errors.InternalError;
    }
    // eslint-disable-next-line no-param-reassign
    object[prefix] = cond;
}
/*
 * Converts a (possibly nested) conditions object into flat MongoDB
 * filters keyed by dotted paths, e.g.:
 *   { hello: { world: 42 } }            -> { 'hello.world': 42 }
 *   { hello: { world: { '$eq': 42 } } } -> { 'hello.world': { '$eq': 42 } }
 *
 * Throws InternalError on arrays, nullish values, recursion deeper than
 * 10 levels, unknown `$`-operators, or a mix of operators and fields.
 */
function translateConditions(depth, prefix, object, cond) {
    // Defensive bound on recursion depth.
    if (depth < 0 || depth > 10) {
        throw errors.InternalError;
    }
    if (cond === null || cond === undefined || Array.isArray(cond)) {
        throw errors.InternalError;
    }
    if (typeof cond !== 'object') {
        // Scalar leaf: assign directly under the accumulated dotted path.
        _assignCondition(prefix, object, cond);
        return;
    }
    const keys = Object.keys(cond);
    const operatorKeys = keys.filter(k => supportedOperators[k]);
    if (operatorKeys.length === keys.length) {
        // Pure operator object, e.g. { '$eq': 42 }: assign as-is.
        _assignCondition(prefix, object, cond);
        return;
    }
    if (operatorKeys.length > 0) {
        // Mixing operators and nested fields is not supported.
        throw errors.InternalError;
    }
    // Plain nested fields: recurse, extending the dotted prefix.
    keys.forEach(field => {
        if (field.startsWith('$')) {
            // Unknown $-prefixed operator.
            throw errors.InternalError;
        }
        const childPrefix = !prefix ? field : `${prefix}.${field}`;
        translateConditions(depth + 1, childPrefix, object, cond[field]);
    });
}
/**
 * format v0 master key: in the v0 layout the master version is stored
 * under the bare object key, so this is the identity function.
 * @param {String} key object key
 * @return {String} formatted key
 */
function formatMasterKeyV0(key) {
    return key;
}
/**
 * format v1 master key: prefixes the object key with the v1 layout's
 * master-key namespace prefix (DbPrefixes.Master).
 * @param {String} key object key
 * @return {String} formatted key
 */
function formatMasterKeyV1(key) {
    return `${DbPrefixes.Master}${key}`;
}
/**
 * format v0 version key: object key and version id joined by the
 * versioning separator (VID_SEP).
 * @param {String} key object key
 * @param {String} versionId object version
 * @return {String} formatted key
 */
function formatVersionKeyV0(key, versionId) {
    return `${key}${VID_SEP}${versionId}`;
}
/**
 * format v1 version key: v0 version key with the v1 layout's version-key
 * namespace prefix (DbPrefixes.Version) prepended.
 * @param {String} key object key
 * @param {String} versionId object version
 * @return {String} formatted key
 */
function formatVersionKeyV1(key, versionId) {
    return `${DbPrefixes.Version}${formatVersionKeyV0(key, versionId)}`;
}
/**
 * Formats a master key according to the bucket's key-format version:
 * v1 buckets get the master-key prefix, anything else uses the raw key.
 * @param {String} key object key
 * @param {String} vFormat bucket format version
 * @return {String} formatted key
 */
function formatMasterKey(key, vFormat) {
    return vFormat === BucketVersioningKeyFormat.v1
        ? formatMasterKeyV1(key)
        : formatMasterKeyV0(key);
}
/**
 * Formats a version key according to the bucket's key-format version:
 * v1 buckets get the version-key prefix, anything else uses v0 layout.
 * @param {String} key object key
 * @param {String} versionId object version
 * @param {String} vFormat bucket format version
 * @return {String} formatted key
 */
function formatVersionKey(key, versionId, vFormat) {
    return vFormat === BucketVersioningKeyFormat.v1
        ? formatVersionKeyV1(key, versionId)
        : formatVersionKeyV0(key, versionId);
}
/**
 * Normalize MongoDB index descriptions into
 * [{ name, keys: [{ key, order }] }] form. The `key` field may be either
 * a Map or a plain object depending on the driver call that produced it.
 * @param {Array} mongoIndexArray indexes as returned by MongoDB
 * @return {Array} normalized index descriptions
 */
function indexFormatMongoArrayToObject(mongoIndexArray) {
    return mongoIndexArray.map(idx => {
        const pairs = idx.key instanceof Map
            ? [...idx.key.entries()]
            : Object.entries(idx.key);
        return {
            name: idx.name,
            keys: pairs.map(([key, order]) => ({ key, order })),
        };
    });
}
/**
 * Inverse of indexFormatMongoArrayToObject(): convert normalized index
 * descriptions back to MongoDB's shape, with `key` as an ordered Map.
 * Extra fields on each index (other than `keys`) are carried over.
 * @param {Array} indexObj normalized index descriptions
 * @return {Array} MongoDB-shaped index descriptions
 */
function indexFormatObjectToMongoArray(indexObj) {
    return indexObj.map(idx => {
        const key = new Map(idx.keys.map(k => [k.key, k.order]));
        // Copy every field except `keys` from idx.
        // eslint-disable-next-line
        const { keys: _, ...toCopy } = idx;
        return Object.assign(toCopy, { name: idx.name, key });
    });
}
// Helpers shared across the MongoDB metadata backend: URI credential
// building, tag-key (de)escaping, condition-to-filter translation,
// v0/v1 key formatting, and index description conversion.
module.exports = {
    credPrefix,
    escape,
    serialize,
    unescape,
    unserialize,
    translateConditions,
    formatMasterKey,
    formatVersionKey,
    indexFormatMongoArrayToObject,
    indexFormatObjectToMongoArray,
};

View File

@@ -1,251 +0,0 @@
import {
supportedOperators,
validateConditionsObject,
} from '../conditions';
import { VersioningConstants } from '../../../versioning/constants';
import errors from '../../../errors';
interface AuthCredentials {
username?: string;
password?: string;
}
interface ObjectMetadata {
tags?: Record<string, string>;
[key: string]: any;
}
interface IndexKey {
key: string;
order: number;
}
interface Index {
name: string;
keys: IndexKey[];
}
type Condition = string | number | boolean | Record<string, any>;
function escape(obj: Record<string, string>): Record<string, string> {
const _obj: Record<string, string> = {};
Object.keys(obj).forEach(prop => {
const _prop = prop
.replace(/\$/g, '\uFF04')
.replace(/\./g, '\uFF0E');
_obj[_prop] = obj[prop];
});
return _obj;
}
function unescape(obj: Record<string, string>): Record<string, string> {
const _obj: Record<string, string> = {};
Object.keys(obj).forEach(prop => {
const _prop = prop
.replace(/\uFF04/g, '$')
.replace(/\uFF0E/g, '.');
_obj[_prop] = obj[prop];
});
return _obj;
}
function serialize(objMD: ObjectMetadata): void {
// Tags require special handling since dot and dollar are accepted
if (objMD.tags) {
// eslint-disable-next-line
objMD.tags = escape(objMD.tags);
}
}
function unserialize(objMD: ObjectMetadata): void {
if (objMD.tags) {
objMD.tags = unescape(objMD.tags);
}
}
function credPrefix(authCredentials?: AuthCredentials): string {
if (authCredentials?.username && authCredentials?.password) {
const username = encodeURIComponent(authCredentials.username);
const password = encodeURIComponent(authCredentials.password);
return `${username}:${password}@`;
}
return '';
}
function _assignCondition(prefix: string, object: Record<string, any>, cond: Condition): void {
if (!validateConditionsObject(cond) || prefix === '') {
throw errors.InternalError;
}
// eslint-disable-next-line no-param-reassign
object[prefix] = cond;
}
/*
* converts conditions object into mongodb-usable filters
* Ex:
* { {
* hello: {
* world: 42 ====> 'hello.world': 42,
* }
* } }
*
* { {
* hello: {
* world: {
* '$eq': 42 ====> 'hello.world': { '$eq': 42 },
* }
* }
* } }
*/
function translateConditions(
depth: number,
prefix: string,
object: Record<string, any>,
cond: Condition
): void {
if (depth < 0 || depth > 10) {
throw errors.InternalError;
}
if (Array.isArray(cond) || cond === null || cond === undefined) {
throw errors.InternalError;
}
if (typeof cond !== 'object') {
_assignCondition(prefix, object, cond);
return;
}
const fields = Object.keys(cond as Record<string, any>);
const opFields = fields.filter(f => supportedOperators[f]);
if (fields.length === opFields.length) {
_assignCondition(prefix, object, cond);
return;
}
if (opFields.length === 0) {
for (const f of fields) {
if (f.startsWith('$')) {
throw errors.InternalError;
}
const nPrefix = !prefix ? f : `${prefix}.${f}`;
translateConditions(depth + 1, nPrefix, object, (cond as Record<string, any>)[f]);
}
return;
}
// mix of operators and nested fields
throw errors.InternalError;
}
/**
* format v0 master key
* @param {String} key object key
* @return {String} formatted key
*/
function formatMasterKeyV0(key: string): string {
return key;
}
/**
* Adds new prefix to v0 key
* @param {String} key object key
* @return {String} formatted key
*/
function formatMasterKeyV1(key: string): string {
return `${VersioningConstants.DbPrefixes.Master}${key}`;
}
/**
* format v0 version key
* @param {String} key object key
* @param {String} versionId object version
* @return {String} formatted key
*/
function formatVersionKeyV0(key: string, versionId: string): string {
return `${key}${VersioningConstants.VersionId.Separator}${versionId}`;
}
/**
* Adds new prefix to v0 key
* @param {String} key object key
* @param {String} versionId object version
* @return {String} formatted key
*/
function formatVersionKeyV1(key: string, versionId: string): string {
return `${VersioningConstants.DbPrefixes.Version}${formatVersionKeyV0(key, versionId)}`;
}
/**
* Formats master key according to bucket format version
* @param {String} key object key
* @param {String} vFormat bucket format version
* @return {String} formatted key
*/
function formatMasterKey(key: string, vFormat: string): string {
return vFormat === VersioningConstants.BucketVersioningKeyFormat.v1
? formatMasterKeyV1(key)
: formatMasterKeyV0(key);
}
/**
* Formats version key according to bucket format version
* @param {String} key object key
* @param {String} versionId object version
* @param {String} vFormat bucket format version
* @return {String} formatted key
*/
function formatVersionKey(key: string, versionId: string, vFormat: string): string {
return vFormat === VersioningConstants.BucketVersioningKeyFormat.v1
? formatVersionKeyV1(key, versionId)
: formatVersionKeyV0(key, versionId);
}
interface MongoIndex {
name: string;
key: Map<string, number> | Record<string, number>;
[key: string]: any;
}
function indexFormatMongoArrayToObject(mongoIndexArray: MongoIndex[]): Index[] {
return mongoIndexArray.map(idx => {
const entries = idx.key instanceof Map
? Array.from(idx.key.entries())
: Object.entries(idx.key);
return {
name: idx.name,
keys: entries.map(([key, order]) => ({ key, order }))
};
});
}
function indexFormatObjectToMongoArray(indexObj: Index[]): MongoIndex[] {
return indexObj.map(idx => {
const key = new Map();
idx.keys.forEach(k => key.set(k.key, k.order));
const { keys: _, ...toCopy } = idx;
return { ...toCopy, name: idx.name, key };
});
}
export {
AuthCredentials,
ObjectMetadata,
IndexKey,
Index,
Condition,
MongoIndex,
credPrefix,
escape,
serialize,
unescape,
unserialize,
translateConditions,
formatMasterKey,
formatVersionKey,
indexFormatMongoArrayToObject,
indexFormatObjectToMongoArray,
};

View File

@@ -1,12 +1,3 @@
export enum BucketVersioningFormat {
CURRENT = 'v1',
V0 = 'v0',
V0MIG = 'v0mig',
V0V1 = 'v0v1',
V1MIG = 'v1mig',
V1 = 'v1',
};
export const VersioningConstants = {
VersionId: {
Separator: '\0',
@@ -17,12 +8,12 @@ export const VersioningConstants = {
Replay: '\x7fR',
},
BucketVersioningKeyFormat: {
current: BucketVersioningFormat.CURRENT,
v0: BucketVersioningFormat.V0,
v0mig: BucketVersioningFormat.V0MIG,
v0v1: BucketVersioningFormat.V0V1,
v1mig: BucketVersioningFormat.V1MIG,
v1: BucketVersioningFormat.V1,
current: 'v1',
v0: 'v0',
v0mig: 'v0mig',
v0v1: 'v0v1',
v1mig: 'v1mig',
v1: 'v1',
},
ExternalNullVersionId: 'null',
};

View File

@@ -3,7 +3,7 @@
"engines": {
"node": ">=16"
},
"version": "8.1.147",
"version": "8.1.138",
"description": "Common utilities for the S3 project components",
"main": "build/index.js",
"repository": {
@@ -32,14 +32,15 @@
"bson": "^4.0.0",
"debug": "^4.1.0",
"diskusage": "^1.1.1",
"fcntl": "../fcntl",
"httpagent": "../httpagent",
"fcntl": "git+https://git.yourcmc.ru/vitalif/zenko-fcntl.git",
"httpagent": "git+https://git.yourcmc.ru/vitalif/zenko-httpagent.git#development/1.0",
"https-proxy-agent": "^2.2.0",
"ioredis": "^4.28.5",
"ipaddr.js": "^1.9.1",
"joi": "^17.6.0",
"JSONStream": "^1.0.0",
"level": "^9.0.0",
"level": "^5.0.1",
"level-sublevel": "^6.6.5",
"mongodb": "^5.2.0",
"node-forge": "^1.3.0",
"prom-client": "^14.2.0",
@@ -47,8 +48,8 @@
"socket.io": "^4.6.1",
"socket.io-client": "^4.6.1",
"utf8": "^3.0.0",
"uuid": "^8.3.2",
"werelogs": "../werelogs",
"uuid": "^3.0.1",
"werelogs": "git+https://git.yourcmc.ru/vitalif/zenko-werelogs.git#development/8.1",
"xml2js": "^0.4.23"
},
"optionalDependencies": {
@@ -62,11 +63,11 @@
"@types/utf8": "^3.0.1",
"@types/ioredis": "^4.28.10",
"@types/jest": "^27.4.1",
"@types/node": "^22.13.10",
"@types/node": "^18.19.41",
"@types/xml2js": "^0.4.11",
"eslint": "^8.14.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-config-scality": "../eslint-config-scality",
"eslint-config-scality": "git+https://git.yourcmc.ru/vitalif/zenko-eslint-config-scality.git",
"eslint-plugin-react": "^4.3.0",
"jest": "^27.5.1",
"mongodb-memory-server": "^8.12.2",
@@ -93,20 +94,14 @@
"jest": {
"maxWorkers": 1,
"coverageReporters": [
"json",
"html"
"json"
],
"collectCoverage": true,
"collectCoverageFrom": [
"lib/**/*.{js,ts}",
"index.js"
],
"preset": "ts-jest",
"testEnvironment": "node",
"testPathIgnorePatterns": [
"/node_modules/",
"/build/"
],
"transform": {
"^.\\.ts?$": "ts-jest"
},

View File

@@ -1,142 +0,0 @@
const assert = require('assert');
const sinon = require('sinon');
const util = require('util');
const werelogs = require('werelogs');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
const BucketInfo = require('../../../../lib/models/BucketInfo').default;
const MetadataWrapper =
require('../../../../lib/storage/metadata/MetadataWrapper');
const IMPL_NAME = 'mongodb';
const DB_NAME = 'metadata';
const BUCKET_NAME = 'bucket';
const BUCKET_MD = {
_owner: 'testowner',
_ownerDisplayName: 'testdisplayname',
_creationDate: new Date().toJSON(),
_acl: {
Canned: 'private',
FULL_CONTROL: [],
WRITE: [],
WRITE_ACP: [],
READ: [],
READ_ACP: [],
},
_mdBucketModelVersion: 10,
_transient: false,
_deleted: false,
_serverSideEncryption: null,
_versioningConfiguration: null,
_locationConstraint: 'us-east-1',
_readLocationConstraint: null,
_cors: null,
_replicationConfiguration: null,
_lifecycleConfiguration: null,
_uid: '',
_isNFS: null,
ingestion: null,
};
const mongoserver = new MongoMemoryReplSet({
debug: false,
instanceOpts: [
{ port: 27021 },
],
replSet: {
name: 'rs0',
count: 1,
DB_NAME,
storageEngine: 'ephemeralForTest',
},
});
describe('MongoClientInterface:metadata.getBucketInfos', () => {
let metadata;
beforeAll(async () => {
await mongoserver.start();
await mongoserver.waitUntilRunning();
const opts = {
mongodb: {
replicaSetHosts: 'localhost:27021',
writeConcern: 'majority',
replicaSet: 'rs0',
readPreference: 'primary',
database: DB_NAME,
},
};
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
metadata.setup = util.promisify(metadata.setup.bind(metadata));
metadata.createBucket = util.promisify(metadata.createBucket.bind(metadata));
metadata.getBucketInfos = util.promisify(metadata.client.getBucketInfos.bind(metadata.client));
metadata.close = util.promisify(metadata.close.bind(metadata));
await metadata.setup();
// create collections + metastore info
for (let i = 0; i < 10; i++) {
const bucketName = `${BUCKET_NAME}${i}`;
const bucketMD = BucketInfo.fromObj({
_name: bucketName,
...BUCKET_MD,
});
await metadata.createBucket(bucketName, bucketMD, logger);
}
});
afterAll(async () => {
await metadata.close();
await mongoserver.stop();
});
afterEach(() => {
sinon.restore();
});
const checkBuckets = async (metadata) => {
const { bucketCount, bucketInfos } = await metadata.getBucketInfos(logger);
assert.strictEqual(bucketCount, 10);
assert.strictEqual(bucketInfos.length, 10);
bucketInfos.sort((a, b) => a.getName().localeCompare(b.getName()));
bucketInfos.forEach((bucketInfo, index) => {
assert.strictEqual(bucketInfo.getName(), `${BUCKET_NAME}${index}`);
});
};
it('should return all collection', async () => {
await checkBuckets(metadata);
});
it('should not return collection w/o bucket', async () => {
await metadata.client.db.createCollection('coll1');
await checkBuckets(metadata);
});
it('should ignore views & system.views collection', async () => {
await metadata.client.db.command({
create: 'view1', viewOn: `${BUCKET_NAME}0`, pipeline: [{
$match: { _locationConstraint: 'us-east-1' },
}],
});
await checkBuckets(metadata);
});
it('should fail on getBucketAttributes error', async () => {
sinon.stub(metadata.client, 'getBucketAttributes')
.callThrough()
.onSecondCall().callsArgWith(2, new Error('error'));
try {
await metadata.getBucketInfos(logger);
assert.fail('Expected an error');
} catch (err) {
assert.strictEqual(err.is?.InternalError, true);
}
});
});

View File

@@ -10,8 +10,6 @@ const { versioning } = require('../../../../index');
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
const sinon = require('sinon');
const MongoReadStream = require('../../../../lib/storage/metadata/mongoclient/readStream');
const { DelimiterMaster } = require('../../../../lib/algos/list/delimiterMaster');
const { FILTER_SKIP } = require('../../../../lib/algos/list/tools');
const IMPL_NAME = 'mongodb';
const DB_NAME = 'metadata';
@@ -168,7 +166,7 @@ describe('MongoClientInterface::metadata.listObject', () => {
dataStoreVersionId: 'versionId',
}],
},
nbVersions: 100,
nbVersions: 5,
};
putBulkObjectVersions(BUCKET_NAME, params.objName, params.objVal, versionParams,
params.nbVersions, next);
@@ -180,7 +178,7 @@ describe('MongoClientInterface::metadata.listObject', () => {
key: 'pfx2-test-object',
versionId: 'null',
},
nbVersions: 100,
nbVersions: 5,
};
putBulkObjectVersions(BUCKET_NAME, params.objName, params.objVal, versionParams,
params.nbVersions, next);
@@ -192,7 +190,7 @@ describe('MongoClientInterface::metadata.listObject', () => {
key: 'pfx3-test-object',
versionId: 'null',
},
nbVersions: 100,
nbVersions: 5,
};
putBulkObjectVersions(BUCKET_NAME, params.objName, params.objVal, versionParams,
params.nbVersions, next);
@@ -202,7 +200,6 @@ describe('MongoClientInterface::metadata.listObject', () => {
afterEach(done => {
metadata.deleteBucket(BUCKET_NAME, logger, done);
sinon.restore();
});
it(`Should list master versions of objects ${variation.it}`, done => {
@@ -272,13 +269,13 @@ describe('MongoClientInterface::metadata.listObject', () => {
const versionsPerKey = {};
return metadata.listObject(bucketName, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.Versions.length, 300);
assert.strictEqual(data.Versions.length, 15);
data.Versions.forEach(version => {
versionsPerKey[version.key] = (versionsPerKey[version.key] || 0) + 1;
});
assert.strictEqual(versionsPerKey['pfx1-test-object'], 100);
assert.strictEqual(versionsPerKey['pfx2-test-object'], 100);
assert.strictEqual(versionsPerKey['pfx3-test-object'], 100);
assert.strictEqual(versionsPerKey['pfx1-test-object'], 5);
assert.strictEqual(versionsPerKey['pfx2-test-object'], 5);
assert.strictEqual(versionsPerKey['pfx3-test-object'], 5);
return done();
});
});
@@ -286,16 +283,16 @@ describe('MongoClientInterface::metadata.listObject', () => {
it(`Should truncate list of master versions of objects ${variation.it}`, done => {
const params = {
listingType: 'DelimiterVersions',
maxKeys: 50,
maxKeys: 5,
};
const versionsPerKey = {};
return metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.Versions.length, 50);
assert.strictEqual(data.Versions.length, 5);
data.Versions.forEach(version => {
versionsPerKey[version.key] = (versionsPerKey[version.key] || 0) + 1;
});
assert.strictEqual(versionsPerKey['pfx1-test-object'], 50);
assert.strictEqual(versionsPerKey['pfx1-test-object'], 5);
return done();
});
});
@@ -309,11 +306,11 @@ describe('MongoClientInterface::metadata.listObject', () => {
const versionsPerKey = {};
return metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
assert.deepStrictEqual(err, null);
assert.strictEqual(data.Versions.length, 100);
assert.strictEqual(data.Versions.length, 5);
data.Versions.forEach(version => {
versionsPerKey[version.key] = (versionsPerKey[version.key] || 0) + 1;
});
assert.strictEqual(versionsPerKey['pfx2-test-object'], 100);
assert.strictEqual(versionsPerKey['pfx2-test-object'], 5);
return done();
});
});
@@ -355,9 +352,9 @@ describe('MongoClientInterface::metadata.listObject', () => {
return get(3, null, null, err => {
assert.deepStrictEqual(err, null);
assert.strictEqual(Object.keys(versionsPerKey).length, 3);
assert.strictEqual(versionsPerKey['pfx1-test-object'], 100);
assert.strictEqual(versionsPerKey['pfx2-test-object'], 100);
assert.strictEqual(versionsPerKey['pfx3-test-object'], 100);
assert.strictEqual(versionsPerKey['pfx1-test-object'], 5);
assert.strictEqual(versionsPerKey['pfx2-test-object'], 5);
assert.strictEqual(versionsPerKey['pfx3-test-object'], 5);
done();
});
});
@@ -422,7 +419,7 @@ describe('MongoClientInterface::metadata.listObject', () => {
async.series([
next => metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.Versions.length, 100);
assert.strictEqual(data.Versions.length, 5);
versionIds = data.Versions.map(version => version.VersionId);
return next();
}),
@@ -441,7 +438,7 @@ describe('MongoClientInterface::metadata.listObject', () => {
next => metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
const newVersionIds = data.Versions.map(version => version.VersionId);
assert.strictEqual(data.Versions.length, 100);
assert.strictEqual(data.Versions.length, 5);
assert(versionIds.every(version => newVersionIds.includes(version)));
return next();
}),
@@ -488,7 +485,7 @@ describe('MongoClientInterface::metadata.listObject', () => {
next => flagObjectForDeletion(objVal.key, next),
next => metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
assert.ifError(err);
assert.strictEqual(data.Versions.length, 300);
assert.strictEqual(data.Versions.length, 15);
const listedObjectNames = data.Versions.map(x => x.key);
assert(!listedObjectNames.includes(objVal.key));
return next();
@@ -515,6 +512,7 @@ describe('MongoClientInterface::metadata.listObject', () => {
});
});
it('Should properly destroy the MongoDBReadStream on error', done => {
// eslint-disable-next-line func-names
const destroyStub = sinon.stub(MongoReadStream.prototype, 'destroy').callsFake(function (...args) {
@@ -539,41 +537,6 @@ describe('MongoClientInterface::metadata.listObject', () => {
});
});
it('Should properly destroy the stream when the skip algorithm triggers the setSkipRangeCb fn', done => {
const destroyStub = sinon.stub(MongoReadStream.prototype, 'destroy');
const extension = new DelimiterMaster({
maxKeys: 100,
}, logger, BucketVersioningKeyFormat.v1);
sinon.stub(extension, 'filter').returns(FILTER_SKIP);
sinon.stub(extension, 'skipping').returns(['newRangeMain', 'newRangeSecondary']);
const params = {
mainStreamParams: {
gte: 'someKey',
},
secondaryStreamParams: null,
mongifiedSearch: false,
};
return metadata.client.internalListObject(BUCKET_NAME, params, extension,
BucketVersioningKeyFormat.v1, logger, err => {
assert(!err, 'No error should occur');
assert(destroyStub.called, 'Destroy should have been called on MongoReadStream');
if (variation.vFormat === BucketVersioningKeyFormat.v1) {
// In v1 case, the skip algorithm will trigger a recursive
// call of the internal listing function
// that should, upon completion, call the destroy method
assert(destroyStub.callCount === 3, 'Destroy should have been called 3 times');
} else {
assert(destroyStub.callCount === 2, 'Destroy should have been called once');
}
return done();
});
});
it('Should not include location in listing result and use custom listing parser', done => {
const opts = {
mongodb: {

View File

@@ -1,229 +0,0 @@
'use strict'; // eslint-disable-line strict
const assert = require('assert');
const sinon = require('sinon');
const Vault = require('../../../lib/auth/Vault').default;
const AuthInfo = require('../../../lib/auth/AuthInfo').default;
const DummyRequestLogger = require('../helpers').DummyRequestLogger;
const log = new DummyRequestLogger();
const mockUserInfo = {
arn: 'arn:aws:iam::123456789012:user/testUser',
canonicalID: 'canonical123',
shortid: '123456789012',
email: 'test@example.com',
accountDisplayName: 'TestAccount',
IAMdisplayName: 'TestUser',
};
describe('Vault class', () => {
let vault;
let mockClient;
let sandbox;
beforeEach(() => {
sandbox = sinon.createSandbox();
mockClient = {
verifySignatureV4: sandbox.stub(),
verifySignatureV2: sandbox.stub(),
healthcheck: sandbox.stub(),
report: sandbox.stub(),
getCanonicalIds: sandbox.stub(),
getEmailAddresses: sandbox.stub(),
getAccountIds: sandbox.stub(),
checkPolicies: sandbox.stub(),
getOrCreateEncryptionKeyId: sandbox.stub(),
};
vault = new Vault(mockClient, 'mockImpl');
});
afterEach(() => {
sandbox.restore();
});
describe('authenticateV4Request', () => {
const mockParams = {
version: 4,
log,
data: {
accessKey: 'testAccessKey',
signatureFromRequest: 'testSignature',
region: 'us-east-1',
stringToSign: 'testStringToSign',
scopeDate: '20250122',
authType: 'header',
signatureVersion: '4',
signatureAge: 0,
timestamp: Date.now(),
credentialScope: 'testScope',
securityToken: 'testToken',
algo: 'sha256',
log,
},
};
it('should handle successful authentication with quota', done => {
const mockAccountQuota = {
account: '123456789012',
quota: BigInt(1000),
};
const mockResponse = {
message: {
message: 'Success',
body: {
userInfo: mockUserInfo,
authorizationResults: [{
isAllowed: true,
isImplicit: false,
arn: mockUserInfo.arn,
action: 'testAction',
}],
accountQuota: mockAccountQuota,
},
},
};
mockClient.verifySignatureV4.callsFake(
(_stringToSign, _signature, _accessKey, _region, _scopeDate,
_options, callback) => {
callback(null, mockResponse);
},
);
vault.authenticateV4Request(mockParams, [], (err, data, results,
_params, infos) => {
assert.strictEqual(err, null);
assert(data instanceof AuthInfo);
assert.strictEqual(data.getCanonicalID(), mockUserInfo.canonicalID);
assert.deepStrictEqual(infos.accountQuota, mockAccountQuota);
done();
});
});
it('should handle authentication with missing quota', done => {
const mockResponse = {
message: {
message: 'Success',
body: {
userInfo: mockUserInfo,
authorizationResults: [{
isAllowed: true,
isImplicit: false,
arn: mockUserInfo.arn,
action: 'testAction',
}],
},
},
};
mockClient.verifySignatureV4.callsFake(
(_stringToSign, _signature, _accessKey, _region, _scopeDate,
_options, callback) => {
callback(null, mockResponse);
},
);
vault.authenticateV4Request(mockParams, [], (err, data, results,
_params, infos) => {
assert.strictEqual(err, null);
assert(data instanceof AuthInfo);
assert.deepStrictEqual(infos.accountQuota, {});
done();
});
});
it('should handle authentication failure', done => {
const mockError = new Error('Authentication failed');
mockClient.verifySignatureV4.callsFake(
(_stringToSign, _signature, _accessKey, _region, _scopeDate,
_options, callback) => {
callback(mockError);
},
);
vault.authenticateV4Request(mockParams, [], err => {
assert.strictEqual(err, mockError);
done();
});
});
it('should properly serialize request contexts', done => {
const mockRequestContexts = [{
serialize: () => ({ serialized: 'context' }),
}];
const mockResponse = {
message: {
message: 'Success',
body: {
userInfo: mockUserInfo,
authorizationResults: [{
isAllowed: true,
isImplicit: false,
arn: mockUserInfo.arn,
action: 'testAction',
}],
},
},
};
mockClient.verifySignatureV4.callsFake(
(_stringToSign, _signature, _accessKey, _region, _scopeDate,
options, callback) => {
assert.deepStrictEqual(options.requestContext,
[{ serialized: 'context' }]);
callback(null, mockResponse);
},
);
vault.authenticateV4Request(mockParams, mockRequestContexts,
(err, data) => {
assert.strictEqual(err, null);
assert(data instanceof AuthInfo);
done();
});
});
it('should handle quota with large numbers', done => {
    // 2^53 — beyond Number.MAX_SAFE_INTEGER, hence the BigInt.
    const bigQuota = {
        account: '123456789012',
        quota: BigInt('9007199254740992'),
    };
    const quotaResponse = {
        message: {
            message: 'Success',
            body: {
                userInfo: mockUserInfo,
                authorizationResults: [{
                    isAllowed: true,
                    isImplicit: false,
                    arn: mockUserInfo.arn,
                    action: 'testAction',
                }],
                accountQuota: bigQuota,
            },
        },
    };
    mockClient.verifySignatureV4.callsFake(
        (_stringToSign, _signature, _accessKey, _region, _scopeDate,
            _options, cb) => cb(null, quotaResponse),
    );
    vault.authenticateV4Request(mockParams, [], (err, _data, _results,
        _params, infos) => {
        assert.strictEqual(err, null);
        // Compare as strings to avoid any BigInt/Number coercion issues.
        assert.strictEqual(infos.accountQuota.quota.toString(),
            '9007199254740992');
        done();
    });
});
});
});

View File

@@ -2,7 +2,6 @@ const assert = require('assert');
const BucketInfo = require('../../../lib/models/BucketInfo').default;
const { WebsiteConfiguration } =
require('../../../lib/models/WebsiteConfiguration');
const { VeeamCapacityInfo } = require('../../../lib/models/Veeam');
// create variables to populate dummyBucket
const bucketName = 'nameOfBucket';
@@ -222,15 +221,14 @@ const testBucketCapabilities = {
},
},
CapacityInfo: {
Capacity: 1n,
Available: 1n,
Used: 0n,
LastModified: '2021-09-29T14:00:00.000Z',
Capacity: 1,
Available: 1,
Used: 0,
},
},
};
const testBucketQuota = 100000n;
const testBucketQuota = 100000;
// create a dummy bucket to test getters and setters
Object.keys(acl).forEach(
@@ -294,16 +292,8 @@ Object.keys(acl).forEach(
dummyBucket._objectLockConfiguration,
notificationConfiguration: dummyBucket._notificationConfiguration,
tags: dummyBucket._tags,
capabilities: dummyBucket._capabilities ? {
...dummyBucket._capabilities,
VeeamSOSApi: dummyBucket._capabilities.VeeamSOSApi ? {
...dummyBucket._capabilities.VeeamSOSApi,
CapacityInfo: VeeamCapacityInfo.serialize(
dummyBucket._capabilities.VeeamSOSApi.CapacityInfo,
),
} : undefined,
} : undefined,
quotaMax: dummyBucket._quotaMax.toString(),
capabilities: dummyBucket._capabilities,
quotaMax: dummyBucket._quotaMax,
};
assert.strictEqual(serialized, JSON.stringify(bucketInfos));
done();
@@ -718,7 +708,7 @@ Object.keys(acl).forEach(
it('setQuota should set bucket quota', () => {
dummyBucket.setQuota();
assert.deepStrictEqual(
dummyBucket.getQuota(), 0n);
dummyBucket.getQuota(), 0);
});
});
}),

View File

@@ -28,8 +28,6 @@ describe('RequestContext', () => {
'reqTagOne=valueOne&reqTagTwo=valueTwo', // requestObjTags
'existingTagOne=valueOne&existingTagTwo=valueTwo', // existingObjTag
true, // needTagEval
5, // objectLockRetentionDays
true, // needQuota
];
const rc = new RequestContext(...constructorParams);
@@ -64,12 +62,10 @@ describe('RequestContext', () => {
{ name: 'getMultiFactorAuthAge', expectedValue: null },
{ name: 'getSecurityToken', expectedValue: 'security-token' },
{ name: 'getPolicyArn', expectedValue: 'arn:aws:iam::aws:policy/AmazonS3ReadOnlyAccess' },
{ name: 'isQuotaCheckNeeded', expectedValue: false },
{ name: 'getRequestObjTags', expectedValue: 'reqTagOne=valueOne&reqTagTwo=valueTwo' },
{ name: 'getExistingObjTag', expectedValue: 'existingTagOne=valueOne&existingTagTwo=valueTwo' },
{ name: 'getNeedTagEval', expectedValue: true },
{ name: 'getObjectLockRetentionDays', expectedValue: 5 },
{ name: 'isQuotaCheckNeeded', expectedValue: true },
];
GetterTests.forEach(testCase => {
it(`getter:${testCase.name}`, () => {
@@ -115,8 +111,7 @@ describe('RequestContext', () => {
specificResource: 'specific-resource',
sslEnabled: true,
tokenIssueTime: null,
objectLockRetentionDays: 5,
needQuota: true,
objectLockRetentionDays: null,
};
it('serialize()', () => {
assert.deepStrictEqual(JSON.parse(rc.serialize()), SerializedFields);

View File

@@ -1,7 +1,6 @@
const assert = require('assert');
const DummyRequest = require('../../utils/DummyRequest');
const requestUtils = require('../../../lib/policyEvaluator/requestUtils');
const { TLSSocket } = require('tls');
describe('requestUtils.getClientIp', () => {
// s3 config with 'requests.viaProxy` enabled
@@ -28,27 +27,6 @@ describe('requestUtils.getClientIp', () => {
assert.strictEqual(result, testClientIp1);
});
it('should return client Ip address in the proxy case when the header has uppercases', () => {
const request = new DummyRequest({
headers: {
'x-forwarded-for': [testClientIp1, testProxyIp].join(','),
},
url: '/',
parsedHost: 'localhost',
socket: {
remoteAddress: testProxyIp,
},
});
const result = requestUtils.getClientIp(request, {
requests: {
viaProxy: true,
trustedProxyCIDRs: ['192.168.100.0/22'],
extractClientIPFromHeader: 'X-Forwarded-For',
},
});
assert.strictEqual(result, testClientIp1);
});
it('should return client Ip address from socket info if the request is not forwarded from proxies', () => {
const request = new DummyRequest({
headers: {},
@@ -78,8 +56,8 @@ describe('requestUtils.getClientIp', () => {
assert.strictEqual(result, testClientIp2);
});
it('should not return client Ip address from header if the request comes via proxies and ' +
'no request config is available as the proxy is not trusted', () => {
it('should return client Ip address from header if the request comes via proxies and ' +
'no request config is available', () => {
const request = new DummyRequest({
headers: {
'x-forwarded-for': testClientIp1,
@@ -91,7 +69,7 @@ describe('requestUtils.getClientIp', () => {
},
});
const result = requestUtils.getClientIp(request, configWithoutProxy);
assert.strictEqual(result, testProxyIp);
assert.strictEqual(result, testClientIp1);
});
it('should return client Ip address from socket info if the request comes via proxies and ' +
@@ -111,81 +89,3 @@ describe('requestUtils.getClientIp', () => {
assert.strictEqual(result, dummyRemoteIP);
});
});
// Tests for requestUtils.getHttpProtocolSecurity: decides whether a request
// is "secure" (https) either from a trusted proxy's forwarded-proto header
// or from the TLS state of the underlying socket.
describe('requestUtils.getHttpProtocolSecurity', () => {
    // Config fixtures: one with a trusted proxy configured, one without.
    const configWithProxy = require('../../utils/dummyS3ConfigProxy.json');
    const configWithoutProxy = require('../../utils/dummyS3Config.json');
    const testClientIp = '192.168.100.1';
    const testProxyIp = '192.168.100.2';
    it('should return true if request comes via trusted proxy with https proto header', () => {
        const request = new DummyRequest({
            headers: {
                'x-forwarded-proto': 'https',
            },
            socket: {
                remoteAddress: testProxyIp,
            },
        });
        const result = requestUtils.getHttpProtocolSecurity(request, configWithProxy);
        assert.strictEqual(result, true);
    });
    it('should return false if request comes via trusted proxy with http proto header', () => {
        const request = new DummyRequest({
            headers: {
                'x-forwarded-proto': 'http',
            },
            socket: {
                remoteAddress: testProxyIp,
            },
        });
        const result = requestUtils.getHttpProtocolSecurity(request, configWithProxy);
        assert.strictEqual(result, false);
    });
    it('should check TLS when request not from trusted proxy with http', () => {
        // Untrusted source: the header says http, but the socket itself is
        // encrypted, which should win when no proxy config applies.
        const request = new DummyRequest({
            headers: {
                'x-forwarded-proto': 'http',
            },
            socket: new TLSSocket(null),
        });
        request.socket.encrypted = true;
        const result = requestUtils.getHttpProtocolSecurity(request, configWithoutProxy);
        assert.strictEqual(result, true);
    });
    it('should return false for non-TLS socket', () => {
        // Plain socket and no trusted proxy: the https header is ignored.
        const request = new DummyRequest({
            headers: {
                'x-forwarded-proto': 'https',
            },
            socket: {
                remoteAddress: testClientIp,
            },
        });
        const result = requestUtils.getHttpProtocolSecurity(request, configWithoutProxy);
        assert.strictEqual(result, false);
    });
    it('should handle configured headers with uppercases', () => {
        // Config names the header in mixed case; lookup must be
        // case-insensitive against the lowercased request headers.
        const request = new DummyRequest({
            headers: {
                'x-forwarded-proto': 'https',
            },
            socket: {
                remoteAddress: testProxyIp,
            },
        });
        const result = requestUtils.getHttpProtocolSecurity(request, {
            requests: {
                viaProxy: true,
                trustedProxyCIDRs: ['192.168.100.0/22'],
                extractClientIPFromHeader: 'X-Forwarded-For',
                extractProtocolFromHeader: 'X-Forwarded-Proto',
            },
        });
        assert.strictEqual(result, true);
    });
});

View File

@@ -1,141 +0,0 @@
const http = require('http');
const werelogs = require('werelogs');
const { StatsClient } = require('../../../lib/metrics');
const { routesUtils } = require('../../../lib/s3routes');
const { ArsenalError, default: errors } = require('../../../lib/errors');
const { default: routeDELETE } = require('../../../lib/s3routes/routes/routeDELETE');
const logger = new werelogs.Logger('routeDelete', 'debug', 'debug');
const log = logger.newRequestLogger();
// Unit tests for the DELETE route dispatcher: each bucket/object/query
// combination must be routed to the expected API method, and
// delete-specific error translation (NoSuchKey/NoSuchVersion -> 204)
// must be applied.
describe('routeDELETE', () => {
    let request;
    let response;
    let api;
    let statsClient;
    beforeEach(() => {
        // Fresh request/response pair and a stubbed API for every test.
        request = new http.IncomingMessage();
        response = new http.ServerResponse(request);
        api = { callApiMethod: jest.fn() };
        statsClient = new StatsClient();
        // Stub the shared response helpers so calls can be asserted on.
        routesUtils.responseNoBody = jest.fn();
        routesUtils.statsReport500 = jest.fn();
    });
    it('should return InvalidRequest error if uploadId is present but objectKey is undefined', () => {
        request.query = { uploadId: '1234' };
        request.objectKey = undefined;
        routeDELETE(request, response, api, log, statsClient);
        const err = errors.InvalidRequest.customizeDescription('A key must be specified');
        expect(routesUtils.responseNoBody).toHaveBeenCalledWith(
            err, null, response, 200, log,
        );
    });
    it('should call multipartDelete if uploadId and objectKey are present', () => {
        request.query = { uploadId: '1234' };
        request.objectKey = 'objectKey';
        routeDELETE(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'multipartDelete', request, response, log, expect.any(Function),
        );
    });
    it('should call bucketDeleteWebsite if query.website is present and objectKey is undefined', () => {
        request.query = { website: true };
        request.objectKey = undefined;
        routeDELETE(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'bucketDeleteWebsite', request, response, log, expect.any(Function),
        );
    });
    it('should call bucketDelete when objectKey and query are undefined', () => {
        request.query = {};
        request.objectKey = undefined;
        routeDELETE(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'bucketDelete', request, response, log, expect.any(Function),
        );
    });
    it('should call objectDelete if objectKey is present and no query is defined', () => {
        request.objectKey = 'objectKey';
        request.query = {};
        routeDELETE(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'objectDelete', request, response, log, expect.any(Function),
        );
    });
    it('should call objectDeleteTagging if query.tagging is present and objectKey is defined', () => {
        request.query = { tagging: true };
        request.objectKey = 'objectKey';
        routeDELETE(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'objectDeleteTagging', request, response, log, expect.any(Function),
        );
    });
    it('should return 204 when objectDelete encounters NoSuchKey errors', () => {
        // Deleting an absent key is not an error for S3 DELETE: expect 204.
        request.objectKey = 'objectKey';
        request.query = {};
        const noSuchKeyError = new ArsenalError('NoSuchKey');
        api.callApiMethod = jest.fn((method, req, res, log, callback) => {
            callback(noSuchKeyError, {});
        });
        routeDELETE(request, response, api, log, statsClient);
        expect(routesUtils.responseNoBody).toHaveBeenCalledWith(
            null, {}, response, 204, log,
        );
    });
    it('should return 204 when objectDelete encounters NoSuchVersion errors', () => {
        request.objectKey = 'objectKey';
        request.query = {};
        const noSuchVersionError = new ArsenalError('NoSuchVersion');
        api.callApiMethod = jest.fn((method, req, res, log, callback) => {
            callback(noSuchVersionError, {});
        });
        routeDELETE(request, response, api, log, statsClient);
        expect(routesUtils.responseNoBody).toHaveBeenCalledWith(
            null, {}, response, 204, log,
        );
    });
    it('should return error code when objectDelete encounters non-arsenal errors', () => {
        // Non-Arsenal errors must be passed through unchanged (no 204 mapping).
        request.objectKey = 'objectKey';
        request.query = {};
        const otherError = new Error('NotAnArsenalError');
        api.callApiMethod = jest.fn((method, req, res, log, callback) => {
            callback(otherError, {});
        });
        routeDELETE(request, response, api, log, statsClient);
        expect(routesUtils.responseNoBody).toHaveBeenCalledWith(
            otherError, {}, response, undefined, log,
        );
    });
});

View File

@@ -1,131 +0,0 @@
const http = require('http');
const werelogs = require('werelogs');
const { StatsClient } = require('../../../lib/metrics');
const { routesUtils } = require('../../../lib/s3routes');
const { ArsenalError } = require('../../../lib/errors');
const { default: routerGET } = require('../../../lib/s3routes/routes/routeGET');
const logger = new werelogs.Logger('routeGET', 'debug', 'debug');
const log = logger.newRequestLogger();
// Unit tests for the GET route dispatcher: service/bucket/object-level
// routing, streaming of object data, and 500-stats reporting on error.
describe('routerGET', () => {
    let request;
    let response;
    let api;
    let statsClient;
    let dataRetrievalParams;
    beforeEach(() => {
        // Fresh request/response pair and stubbed API per test.
        request = new http.IncomingMessage();
        response = new http.ServerResponse(request);
        api = { callApiMethod: jest.fn() };
        statsClient = new StatsClient();
        dataRetrievalParams = {};
        // Stub the shared response helpers so calls can be asserted on.
        routesUtils.responseXMLBody = jest.fn();
        routesUtils.responseStreamData = jest.fn();
        routesUtils.statsReport500 = jest.fn();
    });
    it('should return NoSuchBucket error when objectKey is defined but bucketName is undefined', () => {
        // An object key without a bucket is an invalid combination.
        request.bucketName = undefined;
        request.objectKey = 'objectKey';
        request.query = {};
        routerGET(request, response, api, log, statsClient, dataRetrievalParams);
        expect(routesUtils.responseXMLBody).toHaveBeenCalledWith(
            new ArsenalError('NoSuchBucket'),
            null,
            response,
            log,
        );
    });
    it('should call serviceGet when bucketName and objectKey are undefined', () => {
        request.bucketName = undefined;
        request.objectKey = undefined;
        request.query = {};
        routerGET(request, response, api, log, statsClient, dataRetrievalParams);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'serviceGet', request, response, log, expect.any(Function),
        );
    });
    it('should call bucketGetACL when bucketName is defined and query.acl is present', () => {
        request.bucketName = 'bucketName';
        request.objectKey = undefined;
        request.query = { acl: true };
        routerGET(request, response, api, log, statsClient, dataRetrievalParams);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'bucketGetACL', request, response, log, expect.any(Function),
        );
    });
    it('should call objectGet when both bucketName and objectKey are defined and no specific query is present', () => {
        request.bucketName = 'bucketName';
        request.objectKey = 'objectKey';
        request.query = {};
        routerGET(request, response, api, log, statsClient, dataRetrievalParams);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'objectGet', request, response, log, expect.any(Function),
        );
    });
    it('should call objectGetACL when query.acl is present for an object', () => {
        request.bucketName = 'bucketName';
        request.objectKey = 'objectKey';
        request.query = { acl: true };
        routerGET(request, response, api, log, statsClient, dataRetrievalParams);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'objectGetACL', request, response, log, expect.any(Function),
        );
    });
    it('should handle objectGet with responseStreamData when no query is present for an object', () => {
        // A successful objectGet must stream the data with its headers.
        request.bucketName = 'bucketName';
        request.objectKey = 'objectKey';
        request.query = {};
        api.callApiMethod = jest.fn((method, req, res, log, callback) => {
            callback(null, { data: 'objectData' }, { 'Content-Length': 100 }, null);
        });
        routerGET(request, response, api, log, statsClient, dataRetrievalParams);
        expect(routesUtils.responseStreamData).toHaveBeenCalledWith(
            null,
            request.query,
            { 'Content-Length': 100 },
            { data: 'objectData' },
            dataRetrievalParams,
            response,
            null,
            log,
        );
    });
    it('should report 500 stats if objectGet method returns an error', () => {
        request.bucketName = 'bucketName';
        request.objectKey = 'objectKey';
        request.query = {};
        api.callApiMethod = jest.fn((method, req, res, log, callback) => {
            callback(new ArsenalError('InternalError'), {}, {});
        });
        routerGET(request, response, api, log, statsClient, dataRetrievalParams);
        expect(routesUtils.statsReport500).toHaveBeenCalledWith(
            new ArsenalError('InternalError'), statsClient,
        );
    });
});

View File

@@ -1,97 +0,0 @@
const werelogs = require('werelogs');
const logger = new werelogs.Logger('test:routesUtils.routeHEAD');
const http = require('http');
const { routesUtils } = require('../../../lib/s3routes');
const { default: errors } = require('../../../lib/errors');
const { StatsClient } = require('../../../lib/metrics');
const { default: routeHEAD } = require('../../../lib/s3routes/routes/routeHEAD');
// Unit tests for the HEAD route dispatcher: bucketHead vs objectHead
// routing and the response helper each one must invoke on success.
describe('routeHEAD', () => {
    let request;
    let response;
    let api;
    let statsClient;
    beforeEach(() => {
        // Fresh request/response pair and a stubbed API for every test.
        request = new http.IncomingMessage(null);
        response = new http.ServerResponse(request);
        api = {
            callApiMethod: jest.fn(),
        };
        statsClient = new StatsClient();
    });
    it('should respond with MethodNotAllowed if bucketName is undefined', () => {
        request = { ...request, bucketName: undefined };
        jest.spyOn(routesUtils, 'responseXMLBody').mockImplementation();
        routeHEAD(request, response, api, logger, statsClient);
        expect(routesUtils.responseXMLBody).toHaveBeenCalledWith(
            errors.MethodNotAllowed,
            null,
            response,
            logger,
        );
    });
    it('should call bucketHead if objectKey is undefined', () => {
        request = { ...request, bucketName: 'test-bucket', objectKey: undefined };
        jest.spyOn(routesUtils, 'responseNoBody').mockImplementation();
        jest.spyOn(routesUtils, 'statsReport500').mockImplementation();
        routeHEAD(request, response, api, logger, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'bucketHead',
            request,
            response,
            logger,
            expect.any(Function),
        );
        // Drive the captured API callback by hand to check the success path.
        const callback = api.callApiMethod.mock.calls[0][4];
        const corsHeaders = { 'x-amz-cors': 'test' };
        callback(null, corsHeaders);
        expect(routesUtils.statsReport500).toHaveBeenCalledWith(null, statsClient);
        expect(routesUtils.responseNoBody).toHaveBeenCalledWith(
            null,
            corsHeaders,
            response,
            200,
            logger,
        );
    });
    it('should call objectHead if bucketName and objectKey are defined', () => {
        request = { ...request, bucketName: 'test-bucket', objectKey: 'test-object' };
        jest.spyOn(routesUtils, 'responseContentHeaders').mockImplementation();
        jest.spyOn(routesUtils, 'statsReport500').mockImplementation();
        routeHEAD(request, response, api, logger, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'objectHead',
            request,
            response,
            logger,
            expect.any(Function),
        );
        // Drive the captured API callback by hand to check the success path.
        const callback = api.callApiMethod.mock.calls[0][4];
        const resHeaders = { 'x-amz-meta-test': 'test' };
        callback(null, resHeaders);
        expect(routesUtils.statsReport500).toHaveBeenCalledWith(null, statsClient);
        expect(routesUtils.responseContentHeaders).toHaveBeenCalledWith(
            null,
            {},
            resHeaders,
            response,
            logger,
        );
    });
});

View File

@@ -1,88 +0,0 @@
const http = require('http');
const werelogs = require('werelogs');
const { StatsClient } = require('../../../lib/metrics');
const { routesUtils } = require('../../../lib/s3routes');
const { ArsenalError } = require('../../../lib/errors');
const { default: routeOPTIONS } = require('../../../lib/s3routes/routes/routeOPTIONS');
const logger = new werelogs.Logger('routeOption', 'debug', 'debug');
const log = logger.newRequestLogger();
// Unit tests for the OPTIONS (CORS preflight) route: header validation
// and dispatch to the corsPreflight API method.
describe('routeOPTIONS', () => {
    let request;
    let response;
    let api;
    let statsClient;
    beforeEach(() => {
        // Fresh request/response pair and a stubbed API for every test.
        request = new http.IncomingMessage();
        response = new http.ServerResponse(request);
        api = { callApiMethod: jest.fn() };
        statsClient = new StatsClient();
        // Stub the shared response helpers so calls can be asserted on.
        routesUtils.responseNoBody = jest.fn();
        routesUtils.responseXMLBody = jest.fn();
        routesUtils.statsReport500 = jest.fn();
    });
    it('should return BadRequest error if origin header is missing', () => {
        // Preflight requests are meaningless without an Origin header.
        request.headers = {
            'access-control-request-method': 'GET',
        };
        routeOPTIONS(request, response, api, log, statsClient);
        expect(routesUtils.responseXMLBody).toHaveBeenCalledWith(
            new ArsenalError('BadRequest', 'Insufficient information. Origin request header needed.'),
            null,
            response,
            log,
        );
    });
    it('should return BadRequest error for an invalid Access-Control-Request-Method', () => {
        request.headers = {
            'origin': 'http://example.com',
            'access-control-request-method': 'INVALID',
        };
        routeOPTIONS(request, response, api, log, statsClient);
        expect(routesUtils.responseXMLBody).toHaveBeenCalledWith(
            new ArsenalError('BadRequest', 'Invalid Access-Control-Request-Method: INVALID'),
            null,
            response,
            log,
        );
    });
    it('should call corsPreflight method for a valid CORS request', () => {
        request.headers = {
            'origin': 'http://example.com',
            'access-control-request-method': 'GET',
        };
        routeOPTIONS(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'corsPreflight', request, response, log, expect.any(Function),
        );
    });
    it('should report 500 stats if corsPreflight method returns an error', () => {
        request.headers = {
            'origin': 'http://example.com',
            'access-control-request-method': 'GET',
        };
        api.callApiMethod = jest.fn((method, req, res, log, callback) => {
            callback(new ArsenalError('InternalError'), {});
        });
        routeOPTIONS(request, response, api, log, statsClient);
        expect(routesUtils.statsReport500).toHaveBeenCalledWith(
            new ArsenalError('InternalError'), statsClient,
        );
    });
});

View File

@@ -1,250 +0,0 @@
const http = require('http');
const werelogs = require('werelogs');
const { StatsClient } = require('../../../lib/metrics');
const { routesUtils } = require('../../../lib/s3routes');
const { ArsenalError } = require('../../../lib/errors');
const { default: routePUT } = require('../../../lib/s3routes/routes/routePUT');
const logger = new werelogs.Logger('routePut', 'debug', 'debug');
const log = logger.newRequestLogger();
// Unit tests for the PUT route dispatcher: header validation
// (content-md5 / content-length) and routing of every PUT sub-resource
// query (versioning, acl, quota, website, tagging, cors, replication,
// lifecycle, policy, object-lock, notification, encryption) to the
// matching API method.
describe('routePUT', () => {
    let request;
    let response;
    let api;
    let statsClient;
    beforeEach(() => {
        // Fresh request/response pair and a stubbed API for every test.
        request = new http.IncomingMessage();
        response = new http.ServerResponse(request);
        api = { callApiMethod: jest.fn() };
        statsClient = new StatsClient();
        // Stub the shared response helpers so calls can be asserted on.
        routesUtils.responseNoBody = jest.fn();
        routesUtils.responseXMLBody = jest.fn();
        routesUtils.statsReport500 = jest.fn();
    });
    it('should call bucketPut when no objectKey is provided', () => {
        request.bucketName = 'test-bucket';
        request.query = {};
        api.callApiMethod = jest.fn();
        routePUT(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'bucketPut', request, response, log, expect.any(Function),
        );
    });
    it('should call objectPut when objectKey is present and valid content-md5', () => {
        // The md5 below is the base64 digest of an empty body.
        request.objectKey = 'test-object';
        request.headers = {
            'content-md5': '1B2M2Y8AsgTpgAmY7PhCfg==',
            'content-length': 10,
        };
        request.query = {};
        request.bucketName = 'test-bucket';
        routePUT(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'objectPut', request, response, log, expect.any(Function),
        );
    });
    it('should return InvalidDigest error for an invalid content-md5', () => {
        request.objectKey = 'test-object';
        request.headers = { 'content-md5': 'invalid-md5' };
        request.query = {};
        api.callApiMethod = jest.fn();
        routePUT(request, response, api, log, statsClient);
        expect(routesUtils.responseNoBody).toHaveBeenCalledWith(
            new ArsenalError('InvalidDigest'), null, response, 200, log,
        );
    });
    it('should return MissingContentLength error if content-length is missing', () => {
        request.objectKey = 'test-object';
        request.headers = { };
        request.query = {};
        api.callApiMethod = jest.fn();
        routePUT(request, response, api, log, statsClient);
        expect(routesUtils.responseNoBody).toHaveBeenCalledWith(
            new ArsenalError('MissingContentLength'), null, response, 411, log,
        );
    });
    it('should call bucketPutVersioning when query.versioning is set', () => {
        request.bucketName = 'test-bucket';
        request.query = { versioning: '' };
        api.callApiMethod = jest.fn();
        routePUT(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'bucketPutVersioning', request, response, log, expect.any(Function),
        );
    });
    it('should call objectCopy when x-amz-copy-source is provided', () => {
        request.objectKey = 'test-object';
        request.headers = { 'x-amz-copy-source': 'source-bucket/source-key' };
        request.query = {};
        api.callApiMethod = jest.fn();
        routePUT(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'objectCopy', request, response, log, expect.any(Function),
        );
    });
    it('should call objectPutACL when query.acl is present', () => {
        request.objectKey = 'test-object';
        request.query = { acl: '' };
        api.callApiMethod = jest.fn();
        routePUT(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'objectPutACL', request, response, log, expect.any(Function),
        );
    });
    it('should call bucketUpdateQuota when query.quota is set', () => {
        request.bucketName = 'test-bucket';
        request.query = { quota: '' };
        api.callApiMethod = jest.fn();
        routePUT(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'bucketUpdateQuota', request, response, log, expect.any(Function),
        );
    });
    it('should call bucketPutWebsite when query.website is set', () => {
        request.bucketName = 'test-bucket';
        request.query = { website: '' };
        api.callApiMethod = jest.fn();
        routePUT(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'bucketPutWebsite', request, response, log, expect.any(Function),
        );
    });
    it('should call bucketPutTagging when query.tagging is set', () => {
        request.bucketName = 'test-bucket';
        request.query = { tagging: '' };
        api.callApiMethod = jest.fn();
        routePUT(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'bucketPutTagging', request, response, log, expect.any(Function),
        );
    });
    it('should call bucketPutCors when query.cors is set', () => {
        request.bucketName = 'test-bucket';
        request.query = { cors: '' };
        api.callApiMethod = jest.fn();
        routePUT(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'bucketPutCors', request, response, log, expect.any(Function),
        );
    });
    it('should call bucketPutReplication when query.replication is set', () => {
        request.bucketName = 'test-bucket';
        request.query = { replication: '' };
        api.callApiMethod = jest.fn();
        routePUT(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'bucketPutReplication', request, response, log, expect.any(Function),
        );
    });
    it('should call bucketPutLifecycle when query.lifecycle is set', () => {
        request.bucketName = 'test-bucket';
        request.query = { lifecycle: '' };
        api.callApiMethod = jest.fn();
        routePUT(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'bucketPutLifecycle', request, response, log, expect.any(Function),
        );
    });
    it('should call bucketPutPolicy when query.policy is set', () => {
        request.bucketName = 'test-bucket';
        request.query = { policy: '' };
        api.callApiMethod = jest.fn();
        routePUT(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'bucketPutPolicy', request, response, log, expect.any(Function),
        );
    });
    it('should call bucketPutObjectLock when query.object-lock is set', () => {
        request.bucketName = 'test-bucket';
        request.query = { 'object-lock': '' };
        api.callApiMethod = jest.fn();
        routePUT(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'bucketPutObjectLock', request, response, log, expect.any(Function),
        );
    });
    it('should call bucketPutNotification when query.notification is set', () => {
        request.bucketName = 'test-bucket';
        request.query = { notification: '' };
        api.callApiMethod = jest.fn();
        routePUT(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'bucketPutNotification', request, response, log, expect.any(Function),
        );
    });
    it('should call bucketPutEncryption when query.encryption is set', () => {
        request.bucketName = 'test-bucket';
        request.query = { encryption: '' };
        api.callApiMethod = jest.fn();
        routePUT(request, response, api, log, statsClient);
        expect(api.callApiMethod).toHaveBeenCalledWith(
            'bucketPutEncryption', request, response, log, expect.any(Function),
        );
    });
    it('should return BadRequest when content-length is invalid for PUT bucket', () => {
        // Negative content-length is rejected before any API dispatch.
        request.bucketName = 'test-bucket';
        request.query = {};
        request.headers['content-length'] = '-1';
        routePUT(request, response, api, log, statsClient);
        expect(routesUtils.responseNoBody).toHaveBeenCalledWith(
            new ArsenalError('BadRequest'), null, response, undefined, log,
        );
    });
});

View File

@@ -1,222 +0,0 @@
const werelogs = require('werelogs');
const http = require('http');
const assert = require('assert');
const { StatsClient } = require('../../../lib/metrics');
const { routesUtils } = require('../../../lib/s3routes');
const { default: errors } = require('../../../lib/errors');
const { default: routerWebsite } = require('../../../lib/s3routes/routes/routeWebsite');
const logger = new werelogs.Logger('routeWebsite', 'debug', 'debug');
const log = logger.newRequestLogger();
describe('routerWebsite', () => {
let request;
let response;
let api;
let statsClient;
let dataRetrievalParams;
beforeEach(() => {
request = new http.IncomingMessage();
request.connection = { encrypted: true };
response = new http.ServerResponse(request);
api = { callApiMethod: jest.fn() };
statsClient = new StatsClient();
dataRetrievalParams = {};
routesUtils.errorHtmlResponse = jest.fn();
routesUtils.redirectRequest = jest.fn();
routesUtils.redirectRequestOnError = jest.fn();
routesUtils.errorHeaderResponse = jest.fn();
routesUtils.streamUserErrorPage = jest.fn();
routesUtils.responseStreamData = jest.fn();
routesUtils.statsReport500 = jest.fn();
routesUtils.responseContentHeaders = jest.fn();
});
it('should return MethodNotAllowed error if request method is not GET or HEAD', () => {
request.method = 'POST';
request.bucketName = 'test-bucket';
routerWebsite(request, response, api, log, statsClient, dataRetrievalParams);
assert(routesUtils.errorHtmlResponse.mock.calls.length > 0);
assert.strictEqual(routesUtils.errorHtmlResponse.mock.calls[0][0].code, 405);
});
it('should return MethodNotAllowed error if no bucketName is present', () => {
request.method = 'GET';
request.bucketName = undefined;
routerWebsite(request, response, api, log, statsClient, dataRetrievalParams);
assert(routesUtils.errorHtmlResponse.mock.calls.length > 0);
assert.strictEqual(routesUtils.errorHtmlResponse.mock.calls[0][0].code, 405);
});
it('should call websiteGet when request method is GET', () => {
request.method = 'GET';
request.bucketName = 'test-bucket';
routerWebsite(request, response, api, log, statsClient, dataRetrievalParams);
expect(api.callApiMethod).toHaveBeenCalledWith(
'websiteGet', request, response, log, expect.any(Function),
);
});
it('should call websiteHead when request method is HEAD', () => {
request.method = 'HEAD';
request.bucketName = 'test-bucket';
routerWebsite(request, response, api, log, statsClient, dataRetrievalParams);
expect(api.callApiMethod).toHaveBeenCalledWith(
'websiteHead', request, response, log, expect.any(Function),
);
});
it('should handle error with HEAD and call redirectRequestOnError', () => {
request.method = 'HEAD';
request.bucketName = 'test-bucket';
const mockRedirectInfo = { withError: true };
api.callApiMethod.mockImplementation((method, req, res, log, cb) => {
cb(errors.InternalError, null, mockRedirectInfo, null);
});
routerWebsite(request, response, api, log, statsClient, dataRetrievalParams);
expect(routesUtils.redirectRequestOnError).toHaveBeenCalledTimes(1);
});
it('should handle error with HEAD and call redirectRequest', () => {
request.method = 'HEAD';
request.bucketName = 'test-bucket';
const mockRedirectInfo = { withError: false };
api.callApiMethod.mockImplementation((method, req, res, log, cb) => {
cb(errors.InternalError, null, mockRedirectInfo, null);
});
routerWebsite(request, response, api, log, statsClient, dataRetrievalParams);
expect(routesUtils.redirectRequest).toHaveBeenCalledTimes(1);
});
it('should handle error with HEAD and call errorHeaderResponse', () => {
request.method = 'HEAD';
request.bucketName = 'test-bucket';
api.callApiMethod.mockImplementation((method, req, res, log, cb) => {
cb(errors.InternalError, null, null, null);
});
routerWebsite(request, response, api, log, statsClient, dataRetrievalParams);
expect(routesUtils.errorHeaderResponse).toHaveBeenCalledTimes(1);
});
it('should call responseContentHeaders', () => {
request.method = 'HEAD';
request.bucketName = 'test-bucket';
api.callApiMethod.mockImplementation((method, req, res, log, cb) => {
cb(null, null, null, null);
});
routerWebsite(request, response, api, log, statsClient, dataRetrievalParams);
expect(routesUtils.responseContentHeaders).toHaveBeenCalledTimes(1);
});
it('should handle redirect in websiteGet correctly', () => {
request.method = 'GET';
request.bucketName = 'test-bucket';
const mockRedirectInfo = { withError: false };
api.callApiMethod.mockImplementation((method, req, res, log, cb) => {
cb(null, null, null, null, mockRedirectInfo, 'some-key');
});
routerWebsite(request, response, api, log, statsClient, dataRetrievalParams);
expect(routesUtils.redirectRequest).toHaveBeenCalledWith(
mockRedirectInfo, 'some-key', true, response, request.headers.host, null, log,
);
});
it('should handle error in websiteGet and send default error response', () => {
request.method = 'GET';
request.bucketName = 'test-bucket';
const mockError = errors.InternalError;
api.callApiMethod.mockImplementation((method, req, res, log, cb) => {
cb(mockError, null, null, null, null, null);
});
routerWebsite(request, response, api, log, statsClient, dataRetrievalParams);
expect(routesUtils.errorHtmlResponse).toHaveBeenCalledWith(
mockError, null, 'test-bucket', response, null, log,
);
});
it('should handle error in websiteGet and call redirectRequestOnError', () => {
request.method = 'GET';
request.bucketName = 'test-bucket';
const mockRedirectInfo = { withError: true };
const mockError = errors.InternalError;
api.callApiMethod.mockImplementation((method, req, res, log, cb) => {
cb(mockError, null, null, null, mockRedirectInfo, null);
});
routerWebsite(request, response, api, log, statsClient, dataRetrievalParams);
expect(routesUtils.redirectRequestOnError).toHaveBeenCalledTimes(1);
});
it('should handle error and call streamUserErrorPage', () => {
request.method = 'GET';
request.bucketName = 'test-bucket';
const mockError = errors.InternalError;
api.callApiMethod.mockImplementation((method, req, res, log, cb) => {
cb(mockError, null, true, null, null, null);
});
routerWebsite(request, response, api, log, statsClient, dataRetrievalParams);
expect(routesUtils.streamUserErrorPage).toHaveBeenCalledTimes(1);
});
it('should handle error and call errorHtmlResponse', () => {
request.method = 'GET';
request.bucketName = 'test-bucket';
const mockError = errors.InternalError;
api.callApiMethod.mockImplementation((method, req, res, log, cb) => {
cb(mockError, null, null, null, null, null);
});
routerWebsite(request, response, api, log, statsClient, dataRetrievalParams);
expect(routesUtils.errorHtmlResponse).toHaveBeenCalledTimes(1);
});
it('should stream data if no error is present in websiteGet', () => {
request.method = 'GET';
request.bucketName = 'test-bucket';
api.callApiMethod.mockImplementation((method, req, res, log, cb) => {
cb(null, null, { content: 'data' }, null, null, 'some-key');
});
routerWebsite(request, response, api, log, statsClient, dataRetrievalParams);
expect(routesUtils.responseStreamData).toHaveBeenCalledWith(
null, undefined, null, { content: 'data' }, dataRetrievalParams, response, undefined, log,
);
});
});

View File

@@ -1,134 +0,0 @@
const http = require('http');
const werelogs = require('werelogs');
const { default: errors } = require('../../../../lib/errors');
const { JSONResponseBackend } = require('../../../../lib/s3routes/routesUtils');
const logger = new werelogs.Logger('JSONResponseBackend', 'debug', 'debug');
const log = logger.newRequestLogger();
describe('JSONResponseBackend', () => {
let request;
let response;
beforeEach(() => {
request = new http.IncomingMessage();
response = new http.ServerResponse(request);
response.writeHead = jest.fn();
response.end = jest.fn();
response.setHeader = jest.fn();
log.addDefaultFields = jest.fn();
});
describe('okResponse', () => {
it('should send a JSON response with 200 status code', () => {
const json = '{"message":"Success"}';
JSONResponseBackend.okResponse(json, response, log);
const bytesSent = Buffer.byteLength(json);
expect(response.writeHead).toHaveBeenCalledWith(200, { 'Content-type': 'application/json' });
expect(response.end).toHaveBeenCalledWith(json, 'utf8', expect.any(Function));
expect(log.addDefaultFields).toHaveBeenCalledWith({ bytesSent });
});
it('should include additional headers in the response', () => {
const json = '{"message":"Success"}';
const additionalHeaders = { 'x-custom-header': 'value' };
JSONResponseBackend.okResponse(json, response, log, additionalHeaders);
expect(response.setHeader).toHaveBeenCalledWith('x-custom-header', 'value');
expect(response.writeHead).toHaveBeenCalledWith(200, {
'Content-type': 'application/json',
});
});
});
describe('errorResponse', () => {
it('should handle ArsenalError and return appropriate JSON error response', () => {
const errCode = errors.NoSuchKey;
JSONResponseBackend.errorResponse(errCode, response, log);
const expectedJSON = JSON.stringify({
code: 'NoSuchKey',
message: 'The specified key does not exist.',
resource: null,
requestId: log.getSerializedUids(),
});
const bytesSent = Buffer.byteLength(expectedJSON);
expect(response.end).toHaveBeenCalledWith(expectedJSON, 'utf8', expect.any(Function));
expect(response.writeHead).toHaveBeenCalledWith(404, {
'Content-Type': 'application/json',
'Content-Length': bytesSent,
});
expect(log.addDefaultFields).toHaveBeenCalledWith({ bytesSent });
});
it('should handle standard Error and return InternalError as JSON', () => {
const errCode = new Error('Some error occurred');
JSONResponseBackend.errorResponse(errCode, response, log);
const internalError = errors.InternalError.customizeDescription('Some error occurred');
const expectedJSON = JSON.stringify({
code: internalError.message,
message: internalError.description,
resource: null,
requestId: log.getSerializedUids(),
});
const bytesSent = Buffer.byteLength(expectedJSON);
expect(response.writeHead).toHaveBeenCalledWith(500, {
'Content-Type': 'application/json',
'Content-Length': bytesSent,
});
expect(response.end).toHaveBeenCalledWith(expectedJSON, 'utf8', expect.any(Function));
expect(log.addDefaultFields).toHaveBeenCalledWith({ bytesSent });
});
it('should return 304 without body if error code is 304', () => {
const errCode = errors.NotModified;
JSONResponseBackend.errorResponse(errCode, response, log);
expect(response.writeHead).toHaveBeenCalledWith(304, {
'Content-Length': 99,
'Content-Type': 'application/json',
});
});
it('should include invalidArguments metadata if present in the error', () => {
const errCode = errors.InvalidArgument;
errCode.metadata.set('invalidArguments', [
{ ArgumentName: 'arg1', ArgumentValue: 'value1' },
{ ArgumentName: 'arg2', ArgumentValue: 'value2' },
]);
JSONResponseBackend.errorResponse(errCode, response, log);
const expectedJSON = JSON.stringify({
code: 'InvalidArgument',
message: 'Invalid Argument',
ArgumentName1: 'arg1',
ArgumentValue1: 'value1',
ArgumentName2: 'arg2',
ArgumentValue2: 'value2',
resource: null,
requestId: log.getSerializedUids(),
});
const bytesSent = Buffer.byteLength(expectedJSON);
expect(response.end).toHaveBeenCalledWith(expectedJSON, 'utf8', expect.any(Function));
expect(response.writeHead).toHaveBeenCalledWith(400, {
'Content-Type': 'application/json',
'Content-Length': bytesSent,
});
expect(log.addDefaultFields).toHaveBeenCalledWith({ bytesSent });
});
});
});

View File

@@ -1,141 +0,0 @@
const http = require('http');
const werelogs = require('werelogs');
const { default: errors } = require('../../../../lib/errors');
const { XMLResponseBackend } = require('../../../../lib/s3routes/routesUtils');
const logger = new werelogs.Logger('XMLResponseBackend', 'debug', 'debug');
const log = logger.newRequestLogger();
describe('XMLResponseBackend', () => {
let request;
let response;
beforeEach(() => {
request = new http.IncomingMessage();
response = new http.ServerResponse(request);
response.writeHead = jest.fn();
response.end = jest.fn();
response.setHeader = jest.fn();
log.addDefaultFields = jest.fn();
});
describe('okResponse', () => {
it('should send an XML response with 200 status code', () => {
const xml = '<Response>Success</Response>';
XMLResponseBackend.okResponse(xml, response, log);
const bytesSent = Buffer.byteLength(xml);
expect(response.writeHead).toHaveBeenCalledWith(200, { 'Content-type': 'application/xml' });
expect(response.end).toHaveBeenCalledWith(xml, 'utf8', expect.any(Function));
expect(log.addDefaultFields).toHaveBeenCalledWith({ bytesSent });
});
it('should include additional headers in the response', () => {
const xml = '<Response>Success</Response>';
const additionalHeaders = { 'x-custom-header': 'value' };
XMLResponseBackend.okResponse(xml, response, log, additionalHeaders);
expect(response.setHeader).toHaveBeenCalledWith('x-custom-header', 'value');
expect(response.writeHead).toHaveBeenCalledWith(200, {
'Content-type': 'application/xml',
});
});
});
describe('errorResponse', () => {
it('should handle ArsenalError and return appropriate XML error response', () => {
const errCode = errors.NoSuchKey;
XMLResponseBackend.errorResponse(errCode, response, log);
const expectedXML = [
'<?xml version="1.0" encoding="UTF-8"?>',
'<Error>',
'<Code>NoSuchKey</Code>',
'<Message>The specified key does not exist.</Message>',
'<Resource></Resource>',
`<RequestId>${log.getSerializedUids()}</RequestId>`,
'</Error>',
].join('');
const bytesSent = Buffer.byteLength(expectedXML);
expect(response.end).toHaveBeenCalledWith(expectedXML, 'utf8', expect.any(Function));
expect(response.writeHead).toHaveBeenCalledWith(404, {
'Content-Type': 'application/xml',
'Content-Length': bytesSent,
});
expect(log.addDefaultFields).toHaveBeenCalledWith({ bytesSent });
});
it('should handle standard Error and return InternalError as XML', () => {
const errCode = new Error('Some error occurred');
XMLResponseBackend.errorResponse(errCode, response, log);
const internalError = errors.InternalError.customizeDescription('Some error occurred');
const expectedXML = [
'<?xml version="1.0" encoding="UTF-8"?>',
'<Error>',
`<Code>${internalError.message}</Code>`,
`<Message>${internalError.description}</Message>`,
'<Resource></Resource>',
`<RequestId>${log.getSerializedUids()}</RequestId>`,
'</Error>',
].join('');
const bytesSent = Buffer.byteLength(expectedXML);
expect(response.writeHead).toHaveBeenCalledWith(500, {
'Content-Type': 'application/xml',
'Content-Length': bytesSent,
});
expect(response.end).toHaveBeenCalledWith(expectedXML, 'utf8', expect.any(Function));
expect(log.addDefaultFields).toHaveBeenCalledWith({ bytesSent });
});
it('should return 304 without body if error code is 304', () => {
const errCode = errors.NotModified;
XMLResponseBackend.errorResponse(errCode, response, log);
expect(response.writeHead).toHaveBeenCalledWith(304);
expect(response.end).toHaveBeenCalledWith('', 'utf8', expect.any(Function));
});
it('should include invalidArguments metadata if present in the error', () => {
const errCode = errors.InvalidArgument;
errCode.metadata.set('invalidArguments', [
{ ArgumentName: 'arg1', ArgumentValue: 'value1' },
{ ArgumentName: 'arg2', ArgumentValue: 'value2' },
]);
XMLResponseBackend.errorResponse(errCode, response, log);
const expectedXML = [
'<?xml version="1.0" encoding="UTF-8"?>',
'<Error>',
'<Code>InvalidArgument</Code>',
'<Message>Invalid Argument</Message>',
'<ArgumentName1>arg1</ArgumentName1>',
'<ArgumentValue1>value1</ArgumentValue1>',
'<ArgumentName2>arg2</ArgumentName2>',
'<ArgumentValue2>value2</ArgumentValue2>',
'<Resource></Resource>',
`<RequestId>${log.getSerializedUids()}</RequestId>`,
'</Error>',
].join('');
const bytesSent = Buffer.byteLength(expectedXML);
expect(response.end).toHaveBeenCalledWith(expectedXML, 'utf8', expect.any(Function));
expect(response.writeHead).toHaveBeenCalledWith(400, {
'Content-Type': 'application/xml',
'Content-Length': bytesSent,
});
expect(log.addDefaultFields).toHaveBeenCalledWith({ bytesSent });
});
});
});

View File

@@ -1,119 +0,0 @@
const http = require('http');
const werelogs = require('werelogs');
const { default: errors } = require('../../../../lib/errors');
const { errorHtmlResponse } = require('../../../../lib/s3routes/routesUtils');
const logger = new werelogs.Logger('ErrorHtmlResponse', 'debug', 'debug');
const log = logger.newRequestLogger();
describe('errorHtmlResponse', () => {
let response;
beforeEach(() => {
response = new http.ServerResponse(new http.IncomingMessage());
response.writeHead = jest.fn();
response.end = jest.fn();
log.addDefaultFields = jest.fn();
});
it('should send HTML response for ArsenalError', () => {
const err = errors.NoSuchKey;
const bucketName = 'test-bucket';
const corsHeaders = null;
const userErrorPageFailure = false;
response.statusMessage = 'Not Found';
errorHtmlResponse(err, userErrorPageFailure, bucketName, response, corsHeaders, log);
const expectedHtml = [
'<html>',
'<head>',
'<title>404 Not Found</title>',
'</head>',
'<body>',
'<h1>404 Not Found</h1>',
'<ul>',
'<li>Code: NoSuchKey</li>',
'<li>Message: The specified key does not exist.</li>',
'<li>BucketName: test-bucket</li>',
'<li>RequestId: ', log.getSerializedUids(), '</li>',
'</ul>',
'<hr/>',
'</body>',
'</html>',
].join('');
expect(response.writeHead).toHaveBeenCalledWith(404, { 'Content-type': 'text/html' });
expect(response.end).toHaveBeenCalledWith(expectedHtml, 'utf8', expect.any(Function));
});
it('should send HTML response for standard Error', () => {
const err = new Error('Some error occurred');
const bucketName = 'test-bucket';
const corsHeaders = null;
const userErrorPageFailure = false;
response.statusMessage = 'Internal Server Error';
errorHtmlResponse(err, userErrorPageFailure, bucketName, response, corsHeaders, log);
const internalError = errors.InternalError.customizeDescription('Some error occurred');
const expectedHtml = [
'<html>',
'<head>',
'<title>500 Internal Server Error</title>',
'</head>',
'<body>',
'<h1>500 Internal Server Error</h1>',
'<ul>',
`<li>Code: ${internalError.message}</li>`,
`<li>Message: ${internalError.description}</li>`,
'<li>BucketName: test-bucket</li>',
`<li>RequestId: ${log.getSerializedUids()}</li>`,
'</ul>',
'<hr/>',
'</body>',
'</html>',
].join('');
expect(response.writeHead).toHaveBeenCalledWith(500, { 'Content-type': 'text/html' });
expect(response.end).toHaveBeenCalledWith(expectedHtml, 'utf8', expect.any(Function));
});
it('should not include bucket name when userErrorPageFailure is true', () => {
const err = errors.NoSuchKey;
const bucketName = 'test-bucket';
const corsHeaders = null;
const userErrorPageFailure = true;
response.statusMessage = 'Not Found';
errorHtmlResponse(err, userErrorPageFailure, bucketName, response, corsHeaders, log);
const expectedHtml = [
'<html>',
'<head>',
'<title>404 Not Found</title>',
'</head>',
'<body>',
'<h1>404 Not Found</h1>',
'<ul>',
'<li>Code: NoSuchKey</li>',
'<li>Message: The specified key does not exist.</li>',
'<li>RequestId: ', log.getSerializedUids(), '</li>',
'</ul>',
'<h3>An Error Occurred While Attempting to Retrieve a Custom Error Document</h3>',
'<ul>',
'<li>Code: NoSuchKey</li>',
'<li>Message: The specified key does not exist.</li>',
'</ul>',
'<hr/>',
'</body>',
'</html>',
].join('');
expect(response.writeHead).toHaveBeenCalledWith(404, { 'Content-type': 'text/html' });
expect(response.end).toHaveBeenCalledWith(expectedHtml, 'utf8', expect.any(Function));
});
});

View File

@@ -1,4 +1,4 @@
const { v4: uuid } = require('uuid');
const uuid = require('uuid/v4');
const { EventEmitter } = require('events');
const assert = require('assert');

View File

@@ -40,9 +40,7 @@ describe('MongoClientInterface::_handleResults', () => {
};
const testResults = mongoTestClient._handleResults(testInput, true);
const expectedRes = {
versions: 0,
objects: 0,
stalled: 0,
versions: 0, objects: 0,
dataManaged: {
total: { curr: 0, prev: 0 },
locations: {},
@@ -59,9 +57,7 @@ describe('MongoClientInterface::_handleResults', () => {
};
const testResults = mongoTestClient._handleResults(testInput, false);
const expectedRes = {
versions: 0,
objects: 4,
stalled: 0,
versions: 0, objects: 4,
dataManaged: {
total: { curr: 40, prev: 0 },
locations: {
@@ -81,9 +77,7 @@ describe('MongoClientInterface::_handleResults', () => {
};
const testResults = mongoTestClient._handleResults(testInput, true);
const expectedRes = {
versions: 2,
objects: 4,
stalled: 0,
versions: 2, objects: 4,
dataManaged: {
total: { curr: 40, prev: 20 },
locations: {
@@ -569,7 +563,7 @@ describe('MongoClientInterface, tests', () => {
mongoserver.start().then(() => {
mongoserver.waitUntilRunning().then(() => {
const opts = {
replicaSetHosts: '127.0.0.1:27021',
replicaSetHosts: 'localhost:27021',
writeConcern: 'majority',
replicaSet: 'customSetName',
readPreference: 'primary',
@@ -581,7 +575,7 @@ describe('MongoClientInterface, tests', () => {
client.setup(() => done());
});
});
}, 600 * 1000);
});
afterAll(done => {
async.series([
@@ -782,34 +776,9 @@ describe('MongoClientInterface, tests', () => {
});
const bucketName = 'test-bucket';
const capabilityName = 'VeeamSOSApi';
const capabilityField = 'CapacityInfo';
const capabilityValue = {
Capacity: 1n,
Available: 1n,
Used: 0n,
LastModified: '2021-09-29T14:00:00.000Z',
};
it('should update the bucket with quota', done => {
const quotaValue = 1099511627776000n;
async.waterfall([
next => createBucket(client, bucketName, false, err => next(err)),
next => {
const bucketMD = new BucketInfo(bucketName, 'testowner',
'testdisplayname', new Date().toJSON(),
BucketInfo.currentModelVersion());
bucketMD.setQuota(quotaValue);
client.putBucketAttributes(bucketName, bucketMD, logger, err => next(err));
},
next => client.getBucketAttributes(bucketName, logger, (err, bucketMd) => {
assert(!err);
assert.strictEqual(bucketMd._quotaMax, quotaValue);
return next();
}),
next => client.deleteBucket(bucketName, logger, err => next(err)),
], done);
});
const capabilityName = 'testCapability';
const capabilityField = 'testField';
const capabilityValue = { key: 'value' };
it('should add a capability to a bucket', done => {
async.waterfall([
@@ -834,13 +803,13 @@ describe('MongoClientInterface, tests', () => {
next => client.putBucketAttributesCapabilities(
bucketName, capabilityName, capabilityField, capabilityValue, logger, err => next(err)),
next => client.deleteBucketAttributesCapability(
bucketName, capabilityName, '', logger, err => next(err)),
bucketName, capabilityName, capabilityField, logger, err => next(err)),
next => client.getBucketAttributes(bucketName, logger, (err, bucketInfo) => {
if (err) {
return next(err);
}
const capabilities = bucketInfo._capabilities || {};
assert(!capabilities[capabilityName]);
assert(!capabilities[capabilityName][capabilityField]);
return next();
}),
next => client.deleteBucket(bucketName, logger, err => next(err)),

View File

@@ -34,7 +34,7 @@ describe('MongoClientInterface:delObject', () => {
});
it('delObject::should fail when getBucketVFormat fails', done => {
sinon.stub(client, 'getCollection').callsFake(() => ({}));
sinon.stub(client, 'getCollection').callsFake(() => null);
sinon.stub(client, 'getBucketVFormat').callsFake((bucketName, log, cb) => cb(errors.InternalError));
client.deleteObject('example-bucket', 'example-object', {}, logger, err => {
assert(err.is.InternalError);
@@ -43,18 +43,18 @@ describe('MongoClientInterface:delObject', () => {
});
it('delObject::should call deleteObjectNoVer when no versionId', done => {
sinon.stub(client, 'getCollection').callsFake(() => ({}));
sinon.stub(client, 'getCollection').callsFake(() => null);
sinon.stub(client, 'getBucketVFormat').callsFake((bucketName, log, cb) => cb(null, 'v0'));
const deleteObjectNoVerSpy = sinon.spy();
sinon.stub(client, 'deleteObjectNoVer').callsFake(deleteObjectNoVerSpy);
client.deleteObject('example-bucket', 'example-object', {}, logger, {});
const args = [{}, 'example-bucket', 'example-object', { vFormat: 'v0' }, logger, {}];
const args = [null, 'example-bucket', 'example-object', { vFormat: 'v0' }, logger, {}];
assert(deleteObjectNoVerSpy.calledOnceWith(...args));
return done();
});
it('delObject::should call deleteObjectVer when no versionId', done => {
sinon.stub(client, 'getCollection').callsFake(() => ({}));
sinon.stub(client, 'getCollection').callsFake(() => null);
sinon.stub(client, 'getBucketVFormat').callsFake((bucketName, log, cb) => cb(null, 'v0'));
const deleteObjectVerSpy = sinon.spy();
sinon.stub(client, 'deleteObjectVer').callsFake(deleteObjectVerSpy);
@@ -63,7 +63,7 @@ describe('MongoClientInterface:delObject', () => {
};
client.deleteObject('example-bucket', 'example-object', params, logger, {});
params.vFormat = 'v0';
const args = [{}, 'example-bucket', 'example-object', params, logger, {}];
const args = [null, 'example-bucket', 'example-object', params, logger, {}];
assert(deleteObjectVerSpy.calledOnceWith(...args));
return done();
});
@@ -201,7 +201,6 @@ describe('MongoClientInterface:delObject', () => {
const objVal = {
originOp: 's3:ObjectCreated:Put',
};
sinon.restore();
client.repair(collection, 'example-bucket', 'example-object', objVal, master, 'v0', logger, () => {
assert.deepEqual(collection.findOneAndReplace.args[0][1], {
_id: 'example-object',

View File

@@ -20,7 +20,7 @@ describe('MongoClientInterface:listObject', () => {
});
it('should fail when getBucketVFormat fails', done => {
sinon.stub(client, 'getCollection').callsFake(() => ({}));
sinon.stub(client, 'getCollection').callsFake(() => null);
sinon.stub(client, 'getBucketVFormat').callsFake((bucketName, log, cb) => cb(errors.InternalError));
client.listObject('example-bucket', { listingType: 'DelimiterMaster' }, logger, err => {
assert.deepStrictEqual(err, errors.InternalError);
@@ -29,7 +29,7 @@ describe('MongoClientInterface:listObject', () => {
});
it('should fail when internalListObject fails', done => {
sinon.stub(client, 'getCollection').callsFake(() => ({}));
sinon.stub(client, 'getCollection').callsFake(() => null);
sinon.stub(client, 'getBucketVFormat').callsFake((bucketName, log, cb) => cb(null, 'v0'));
sinon.stub(client, 'internalListObject').callsFake((...args) => args[5](errors.InternalError));
client.listObject('example-bucket', { listingType: 'DelimiterMaster' }, logger, err => {

View File

@@ -6,9 +6,6 @@ const sinon = require('sinon');
const MongoClientInterface =
require('../../../../../lib/storage/metadata/mongoclient/MongoClientInterface');
const utils = require('../../../../../lib/storage/metadata/mongoclient/utils');
const DummyRequestLogger = require('../../../helpers').DummyRequestLogger;
const log = new DummyRequestLogger();
describe('MongoClientInterface:putObject', () => {
let client;
@@ -19,7 +16,7 @@ describe('MongoClientInterface:putObject', () => {
});
beforeEach(done => {
sinon.stub(client, 'getCollection').callsFake(() => ({}));
sinon.stub(client, 'getCollection').callsFake(() => null);
return done();
});
@@ -30,7 +27,7 @@ describe('MongoClientInterface:putObject', () => {
it('Should fail when getBucketVFormat fails', done => {
sinon.stub(client, 'getBucketVFormat').callsFake((bucketName, log, cb) => cb(errors.InternalError));
client.putObject('example-bucket', 'example-object', {}, {}, log, err => {
client.putObject('example-bucket', 'example-object', {}, {}, {}, err => {
assert.deepStrictEqual(err, errors.InternalError);
return done();
});
@@ -42,8 +39,8 @@ describe('MongoClientInterface:putObject', () => {
sinon.stub(client, 'getBucketVFormat').callsFake((bucketName, log, cb) => cb(null, 'v0'));
sinon.stub(client, 'putObjectNoVer').callsFake(putObjectNoVerSpy);
// checking if function called with correct params
client.putObject('example-bucket', 'example-object', {}, {}, log, {});
const args = [{}, 'example-bucket', 'example-object', {}, { vFormat: 'v0' }, log, {}];
client.putObject('example-bucket', 'example-object', {}, {}, {}, {});
const args = [null, 'example-bucket', 'example-object', {}, { vFormat: 'v0' }, {}, {}];
assert(putObjectNoVerSpy.calledOnceWith(...args));
return done();
});
@@ -59,9 +56,9 @@ describe('MongoClientInterface:putObject', () => {
versionId: null,
repairMaster: null,
};
client.putObject('example-bucket', 'example-object', {}, params, log, {});
client.putObject('example-bucket', 'example-object', {}, params, {}, {});
params.vFormat = 'v0';
const args = [{}, 'example-bucket', 'example-object', {}, params, log, {}];
const args = [null, 'example-bucket', 'example-object', {}, params, {}, {}];
assert(putObjectVerCase1Spy.calledOnceWith(...args));
return done();
});
@@ -77,9 +74,9 @@ describe('MongoClientInterface:putObject', () => {
versionId: '',
repairMaster: null,
};
client.putObject('example-bucket', 'example-object', {}, params, log, {});
client.putObject('example-bucket', 'example-object', {}, params, {}, {});
params.vFormat = 'v0';
const args = [{}, 'example-bucket', 'example-object', {}, params, log, {}];
const args = [null, 'example-bucket', 'example-object', {}, params, {}, {}];
assert(putObjectVerCase2Spy.calledOnceWith(...args));
return done();
});
@@ -95,9 +92,9 @@ describe('MongoClientInterface:putObject', () => {
versionId: '1234',
repairMaster: false,
};
client.putObject('example-bucket', 'example-object', {}, params, log, {});
client.putObject('example-bucket', 'example-object', {}, params, {}, {});
params.vFormat = 'v0';
const args = [{}, 'example-bucket', 'example-object', {}, params, log, {}];
const args = [null, 'example-bucket', 'example-object', {}, params, {}, {}];
assert(putObjectVerCase3Spy.calledOnceWith(...args));
return done();
});
@@ -113,9 +110,9 @@ describe('MongoClientInterface:putObject', () => {
versionId: '1234',
repairMaster: true,
};
client.putObject('example-bucket', 'example-object', {}, params, log, {});
client.putObject('example-bucket', 'example-object', {}, params, {}, {});
params.vFormat = 'v0';
const args = [{}, 'example-bucket', 'example-object', {}, params, log, {}];
const args = [null, 'example-bucket', 'example-object', {}, params, {}, {}];
assert(putObjectVerCase4Spy.calledOnceWith(...args));
return done();
});
@@ -125,7 +122,7 @@ describe('MongoClientInterface:putObject', () => {
sinon.stub(client, 'getBucketVFormat').callsFake((bucketName, log, cb) => cb(null, 'v0'));
sinon.stub(client, 'putObjectNoVer').callsFake((...args) => args[6](errors.InternalError));
// checking if function called with correct params
client.putObject('example-bucket', 'example-object', {}, {}, log, err => {
client.putObject('example-bucket', 'example-object', {}, {}, {}, err => {
assert.deepStrictEqual(err, errors.InternalError);
return done();
});
@@ -140,7 +137,7 @@ describe('MongoClientInterface:putObject', () => {
versionId: null,
repairMaster: null,
};
client.putObject('example-bucket', 'example-object', {}, params, log, err => {
client.putObject('example-bucket', 'example-object', {}, params, {}, err => {
assert.deepStrictEqual(err, errors.InternalError);
return done();
});
@@ -155,7 +152,7 @@ describe('MongoClientInterface:putObject', () => {
versionId: '',
repairMaster: null,
};
client.putObject('example-bucket', 'example-object', {}, params, log, err => {
client.putObject('example-bucket', 'example-object', {}, params, {}, err => {
assert.deepStrictEqual(err, errors.InternalError);
return done();
});
@@ -170,7 +167,7 @@ describe('MongoClientInterface:putObject', () => {
versionId: '1234',
repairMaster: null,
};
client.putObject('example-bucket', 'example-object', {}, params, log, err => {
client.putObject('example-bucket', 'example-object', {}, params, {}, err => {
assert.deepStrictEqual(err, errors.InternalError);
return done();
});
@@ -185,7 +182,7 @@ describe('MongoClientInterface:putObject', () => {
versionId: '1234',
repairMaster: true,
};
client.putObject('example-bucket', 'example-object', {}, params, log, err => {
client.putObject('example-bucket', 'example-object', {}, params, {}, err => {
assert.deepStrictEqual(err, errors.InternalError);
return done();
});
@@ -452,7 +449,7 @@ describe('MongoClientInterface:putObjectNoVer', () => {
updateOne: () => Promise.resolve({}),
};
client.putObjectNoVer(collection, 'example-bucket', 'example-object', {}, {}, logger, err => {
assert.deepStrictEqual(err, null);
assert.deepStrictEqual(err, undefined);
return done();
});
});

View File

@@ -26,7 +26,7 @@ describe('MongoClientInterface:putObjectWithCond', () => {
});
it('should fail when getBucketVFormat fails', done => {
sinon.stub(client, 'getCollection').callsFake(() => ({}));
sinon.stub(client, 'getCollection').callsFake(() => null);
sinon.stub(client, 'getBucketVFormat').callsFake((bucketName, log, cb) => cb(errors.InternalError));
client.putObjectWithCond('example-bucket', 'example-object', {}, {}, logger, err => {
assert.deepStrictEqual(err, errors.InternalError);
@@ -35,7 +35,7 @@ describe('MongoClientInterface:putObjectWithCond', () => {
});
it('should fail when getBucketVFormat fails', done => {
sinon.stub(client, 'getCollection').callsFake(() => ({}));
sinon.stub(client, 'getCollection').callsFake(() => null);
sinon.stub(client, 'getBucketVFormat').callsFake((bucketName, log, cb) => cb(null));
sinon.stub(utils, 'translateConditions').callsFake(() => {throw errors.InternalError;});
client.putObjectWithCond('example-bucket', 'example-object', {}, {}, logger, err => {
@@ -77,7 +77,7 @@ describe('MongoClientInterface:deleteObjectWithCond', () => {
});
it('should fail when getBucketVFormat fails', done => {
sinon.stub(client, 'getCollection').callsFake(() => ({}));
sinon.stub(client, 'getCollection').callsFake(() => null);
sinon.stub(client, 'getBucketVFormat').callsFake((bucketName, log, cb) => cb(errors.InternalError));
client.deleteObjectWithCond('example-bucket', 'example-object', {}, logger, err => {
assert.deepStrictEqual(err, errors.InternalError);
@@ -86,7 +86,7 @@ describe('MongoClientInterface:deleteObjectWithCond', () => {
});
it('should fail when getBucketVFormat fails', done => {
sinon.stub(client, 'getCollection').callsFake(() => ({}));
sinon.stub(client, 'getCollection').callsFake(() => null);
sinon.stub(client, 'getBucketVFormat').callsFake((bucketName, log, cb) => cb(null));
sinon.stub(utils, 'translateConditions').callsFake(() => {throw errors.InternalError;});
client.deleteObjectWithCond('example-bucket', 'example-object', {}, logger, err => {
@@ -96,7 +96,7 @@ describe('MongoClientInterface:deleteObjectWithCond', () => {
});
it('should fail when internalDeleteObject fails', done => {
sinon.stub(client, 'getCollection').callsFake(() => ({}));
sinon.stub(client, 'getCollection').callsFake(() => {});
sinon.stub(client, 'getBucketVFormat').callsFake((bucketName, log, cb) => cb(null));
sinon.stub(utils, 'translateConditions').callsFake(() => null);
sinon.stub(client, 'internalDeleteObject').callsArgWith(6, errors.InternalError);

View File

@@ -81,7 +81,6 @@
"requests": {
"viaProxy": true,
"trustedProxyCIDRs": ["192.168.100.0/22"],
"extractClientIPFromHeader": "x-forwarded-for",
"extractProtocolFromHeader": "x-forwarded-proto"
"extractClientIPFromHeader": "x-forwarded-for"
}
}

View File

@@ -4,7 +4,7 @@
const assert = require('assert');
const crypto = require('crypto');
const { v4: uuidv4 } = require('uuid');
const uuidv4 = require('uuid/v4');
const {
EchoChannel,

View File

@@ -1,6 +1,6 @@
{
"compilerOptions": {
"target": "ES2021",
"target": "es2020",
"module": "commonjs",
"rootDir": "./",
"resolveJsonModule": true,